In recent times, the field of agriculture has been in urgent need of modernization, since the amount of manual work required to check whether plants are growing correctly remains extensive. Despite several advances in agricultural technology, people working in the agricultural industry still need the ability to sort and recognize different plants and weeds, which takes a great deal of time and effort in the long term.
The potential is ripe for this trillion-dollar industry to be greatly impacted by technological innovations that cut down on the requirement for manual labor, and this is where Artificial Intelligence can benefit the workers in this field, as the time and energy required to identify plant seedlings will be greatly shortened by the use of AI and Deep Learning. The ability to do so far more efficiently and even more effectively than experienced manual labor could lead to better crop yields, the freeing up of human involvement for higher-order agricultural decision making, and in the long term will result in more sustainable environmental practices in agriculture as well.
This dataset contains images of unique plants belonging to 12 different species. The data file names are:
Due to the large volume of data, the images were converted to numpy arrays and stored in the images.npy file, and the corresponding labels were put into Labels.csv, so that you can work on the data/project seamlessly without having to worry about the high data volume.
The dataset comprises 12 plant species.
# Mount Google Drive so the dataset files stored there are reachable from Colab
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive
import os
import numpy as np # Importing numpy for Matrix Operations
import pandas as pd # Importing pandas to read CSV files
import matplotlib.pyplot as plt # Importting matplotlib for Plotting and visualizing images
import math # Importing math module to perform mathematical operations
import cv2 # Importing openCV for image processing
import seaborn as sns # Importing seaborn to plot graphs
from sklearn.utils import class_weight
from sklearn import metrics
from sklearn.model_selection import train_test_split, StratifiedKFold, cross_val_score
from sklearn.metrics import classification_report
from tensorflow.keras import backend
# NOTE: plot_confusion_matrix was removed in scikit-learn 1.2 (replaced by
# ConfusionMatrixDisplay); it was never used in this notebook, so the import
# is dropped to keep the cell runnable on current scikit-learn versions.
from sklearn.metrics import (
    f1_score,
    accuracy_score,
    recall_score,
    precision_score,
    confusion_matrix,
    roc_auc_score,
    precision_recall_curve,
    roc_curve,
    make_scorer,
)
# Tensorflow modules
from sklearn.preprocessing import LabelBinarizer
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense,Dropout,Flatten,Conv2D,MaxPooling2D,BatchNormalization
from tensorflow.keras.optimizers import Adam,SGD
from keras import callbacks
from keras.callbacks import ModelCheckpoint
from sklearn import preprocessing
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix
# Display images using OpenCV
from google.colab.patches import cv2_imshow
# Ignore warnings
import warnings
warnings.filterwarnings('ignore')
# Mount Google drive to access the dataset
# NOTE(review): Drive was already mounted at the top of the notebook, so this
# call is a no-op remount (see the "Drive already mounted" message below);
# kept only so this cell can be re-run in isolation.
from google.colab import drive
drive.mount('/content/drive')
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
# Load the raw image tensor and its matching label table from Google Drive
images = np.load('/content/drive/MyDrive/Colab Notebooks/Project_images.npy')
labels = pd.read_csv('/content/drive/MyDrive/Colab Notebooks/Project_Labels.csv')
# Report the dimensions of each — rows should line up one-to-one
for dataset in (images, labels):
    print(dataset.shape)
(4750, 128, 128, 3) (4750, 1)
# Distinct species names in the label column (np.unique returns them sorted)
np.unique(labels)
array(['Black-grass', 'Charlock', 'Cleavers', 'Common Chickweed',
'Common wheat', 'Fat Hen', 'Loose Silky-bent', 'Maize',
'Scentless Mayweed', 'Shepherds Purse',
'Small-flowered Cranesbill', 'Sugar beet'], dtype=object)
# Print absolute class counts, a separator, then relative frequencies
for normalize in (False, True):
    count = labels.value_counts(normalize=normalize)
    print(count)
    if not normalize:
        print()
        print('*'*10)
        print()
Label Loose Silky-bent 654 Common Chickweed 611 Scentless Mayweed 516 Small-flowered Cranesbill 496 Fat Hen 475 Charlock 390 Sugar beet 385 Cleavers 287 Black-grass 263 Shepherds Purse 231 Common wheat 221 Maize 221 dtype: int64 ********** Label Loose Silky-bent 0.137684 Common Chickweed 0.128632 Scentless Mayweed 0.108632 Small-flowered Cranesbill 0.104421 Fat Hen 0.100000 Charlock 0.082105 Sugar beet 0.081053 Cleavers 0.060421 Black-grass 0.055368 Shepherds Purse 0.048632 Common wheat 0.046526 Maize 0.046526 dtype: float64
Observations:
def plot_images(images, labels1):
    """Display a 5x5 grid of randomly sampled images titled with their labels.

    Parameters
    ----------
    images : np.ndarray
        Image array indexable along its first axis, aligned with `labels1`.
    labels1 : pd.DataFrame
        DataFrame with a 'Label' column whose row index matches `images`.
    """
    # Map row index -> label string, used to title each sampled image
    keys = dict(labels1['Label'])
    rows = 5
    cols = 5
    fig = plt.figure(figsize=(20, 15))
    # Single flat loop over the 25 grid cells (the original nested i/j loop
    # produced the same cell order); sampling is with replacement.
    for cell in range(rows * cols):
        random_index = np.random.randint(0, len(labels1))
        ax = fig.add_subplot(rows, cols, cell + 1)
        ax.imshow(images[random_index])
        ax.set_title(keys[random_index])
    plt.show()
# Show a random grid of labeled samples, then inspect one image two ways.
plot_images(images,labels)
# NOTE(review): cv2_imshow assumes BGR channel order while plt.imshow assumes
# RGB, so the same array renders with different colors here — presumably the
# motivation for the BGR->RGB conversion that follows.
cv2_imshow(images[5])
plt.imshow(images[5])
<matplotlib.image.AxesImage at 0x7f3ff4297190>
Observations:
# Convert every image from BGR to RGB in place so matplotlib shows true
# colors. NOTE: this mutates `images`; re-running the cell swaps the
# channels back again.
for idx in range(images.shape[0]):
    images[idx] = cv2.cvtColor(images[idx], cv2.COLOR_BGR2RGB)
# Re-check the class distribution (unchanged: cvtColor touches pixels,
# not labels) — absolute counts, separator, then relative frequencies
for kwargs in ({}, {'normalize': True}):
    count = labels.value_counts(**kwargs)
    print(count)
    if not kwargs:
        print()
        print('*' * 10)
        print()
Label Loose Silky-bent 654 Common Chickweed 611 Scentless Mayweed 516 Small-flowered Cranesbill 496 Fat Hen 475 Charlock 390 Sugar beet 385 Cleavers 287 Black-grass 263 Shepherds Purse 231 Common wheat 221 Maize 221 dtype: int64 ********** Label Loose Silky-bent 0.137684 Common Chickweed 0.128632 Scentless Mayweed 0.108632 Small-flowered Cranesbill 0.104421 Fat Hen 0.100000 Charlock 0.082105 Sugar beet 0.081053 Cleavers 0.060421 Black-grass 0.055368 Shepherds Purse 0.048632 Common wheat 0.046526 Maize 0.046526 dtype: float64
Observations:
# Plotting the distribution of the classes
sns.countplot(labels['Label'])
# Vertical tick labels keep the 12 species names readable on the x-axis
plt.xticks(rotation='vertical')
(array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]), <a list of 12 Text major ticklabel objects>)
Inferences:
# Preview one sample (already RGB-converted above) before denoising
plt.imshow(images[3])
<matplotlib.image.AxesImage at 0x7f3ff1b06f10>
# Denoise every image with a 3x3 Gaussian blur. With sigmaX=0 OpenCV derives
# the sigma from the kernel size. (Dead commented-out cvtColor line removed;
# append loop replaced with a comprehension.)
images_gb = [cv2.GaussianBlur(img, ksize=(3, 3), sigmaX=0) for img in images]
# Visual sanity check on one blurred sample
plt.imshow(images_gb[3])
<matplotlib.image.AxesImage at 0x7f3ff11f2110>
Observations:
# Downscale the blurred images to 64x64 to cut training cost
height = 64
width = 64
dimensions = (width, height)
# Iterate images_gb directly — it is the list being resized. The original
# looped over range(len(images)), which only worked because both lists have
# the same length.
images_decreased = [
    cv2.resize(img, dimensions, interpolation=cv2.INTER_LINEAR)
    for img in images_gb
]
# Displaying few images post applying the Gaussian Blur & resizing the shape
plot_images(np.array(images_decreased), labels)
from sklearn.model_selection import train_test_split

# Stratified 90/10 split into (train+val)/test, then 90/10 again into
# train/val, so every split keeps the class proportions.
X_temp, X_test, y_temp, y_test = train_test_split(
    np.array(images_decreased), labels, test_size=0.1, random_state=42, stratify=labels
)
X_train, X_val, y_train, y_val = train_test_split(
    X_temp, y_temp, test_size=0.1, random_state=42, stratify=y_temp
)
for X, y in ((X_train, y_train), (X_val, y_val), (X_test, y_test)):
    print(X.shape, y.shape)
(3847, 64, 64, 3) (3847, 1) (428, 64, 64, 3) (428, 1) (475, 64, 64, 3) (475, 1)
# One-hot encode the string labels; fit on train, reuse on val/test so the
# class-to-column mapping is identical across splits.
enc = LabelBinarizer()
y_train_encoded = enc.fit_transform(y_train)
y_val_encoded = enc.transform(y_val)
y_test_encoded = enc.transform(y_test)
# Rescale pixel values from [0, 255] to [0, 1] as float32
X_train_normalized, X_val_normalized, X_test_normalized = (
    split.astype('float32') / 255.0 for split in (X_train, X_val, X_test)
)
for X, y in (
    (X_train_normalized, y_train_encoded),
    (X_val_normalized, y_val_encoded),
    (X_test_normalized, y_test_encoded),
):
    print(X.shape, y.shape)
(3847, 64, 64, 3) (3847, 12) (428, 64, 64, 3) (428, 12) (475, 64, 64, 3) (475, 12)
# Inverse-frequency ("balanced") weights, keyed by integer class index.
# Assumes LabelBinarizer orders classes the same way np.unique sorts them —
# both sort alphabetically, so the indices line up.
labelList = np.unique(labels)
balanced = class_weight.compute_class_weight(
    class_weight="balanced",
    classes=np.array(labelList),
    y=y_train.values.reshape(-1),
)
class_weights = dict(enumerate(balanced))
# print calculated class weights
class_weights
{0: 1.5050860719874803,
1: 1.0145042194092826,
2: 1.3818247126436782,
3: 0.6476430976430977,
4: 1.7909683426443204,
5: 0.8348524305555556,
6: 0.604874213836478,
7: 1.7909683426443204,
8: 0.7669457735247209,
9: 1.714349376114082,
10: 0.7974709784411277,
11: 1.0275106837606838}
# defining a function to compute different metrics to check performance of a classification model built using sklearn
def model_performance_classification_sklearn_with_threshold(model, predictors, target, threshold=0.5):
    """Plot a confusion-matrix heatmap and return a one-row metrics DataFrame.

    Parameters
    ----------
    model : fitted Keras classifier; ``predict`` must return per-class scores.
    predictors : array of inputs fed to ``model.predict``.
    target : one-hot encoded true labels, row-aligned with ``predictors``.
    threshold : unused — class is taken by argmax; kept only for backward
        compatibility with existing callers.

    Returns
    -------
    pd.DataFrame with one row: Accuracy, Recall, Precision, F1 (all weighted).
    """
    pred_scores = model.predict(predictors)
    # Predicted / true class index = position of the max score per row
    pred_classes = np.argmax(pred_scores, axis=1)
    true_classes = np.argmax(target, axis=1)
    # Local name `cm` avoids shadowing sklearn.metrics.confusion_matrix,
    # which this module imports at the top of the file.
    cm = tf.math.confusion_matrix(true_classes, pred_classes)
    f, ax = plt.subplots(figsize=(10, 8))
    sns.heatmap(
        cm,
        annot=True,
        linewidths=.4,
        fmt="d",
        square=True,
        ax=ax
    )
    plt.show()
    acc = accuracy_score(true_classes, pred_classes)
    recall = recall_score(true_classes, pred_classes, average='weighted')  # weighted Recall
    precision = precision_score(true_classes, pred_classes, average='weighted')  # weighted Precision
    f1 = f1_score(true_classes, pred_classes, average='weighted')  # weighted F1-score
    # creating a dataframe of metrics
    df_perf = pd.DataFrame(
        {
            "Accuracy": acc,
            "Recall": recall,
            "Precision": precision,
            "F1": f1,
        },
        index=[0],
    )
    return df_perf
# Clearing backend
backend.clear_session()
# Fixing the seed for random number generators
import random
np.random.seed(42)
random.seed(42)
tf.random.set_seed(42)
# Initializing a sequential model
model = Sequential()
# First conv layer: 126 filters (NOTE(review): 126 is unusual — possibly a typo
# for 128; changing it would alter the trained model, so it is left as is),
# 3x3 kernel; padding 'same' keeps the output spatial size equal to the input.
# input_shape is the 64x64 RGB plant-seedling image dimension (not MNIST).
model.add(Conv2D(126, (3, 3), activation='relu', padding="same", input_shape=(64, 64, 3)))
# Second conv layer, then max pooling to halve the spatial dimensions
model.add(Conv2D(64, (3, 3), activation='relu', padding="same"))
model.add(MaxPooling2D((2, 2), padding = 'same'))
# Third conv layer followed by another 2x2 max pooling
model.add(Conv2D(32, (3, 3), activation='relu', padding="same"))
model.add(MaxPooling2D((2, 2), padding = 'same'))
# flattening the output of the conv layer after max pooling to make it ready for creating dense connections
model.add(Flatten())
# Output layer: 12 neurons (one per species) with softmax for multi-class classification
model.add(Dense(12, activation='softmax'))
# Using Adam Optimizer
opt = Adam()
# Compile model
model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])
Observations:
# Train for 50 epochs, weighting the loss with the "balanced" class weights
# computed earlier to counter the class imbalance
history_1 = model.fit(
X_train_normalized, y_train_encoded,
epochs=50,
validation_data=(X_val_normalized,y_val_encoded),
batch_size=50,
verbose=2,
class_weight = class_weights,
)
Epoch 1/50 77/77 - 12s - loss: 2.3389 - accuracy: 0.1469 - val_loss: 1.9997 - val_accuracy: 0.2547 - 12s/epoch - 156ms/step Epoch 2/50 77/77 - 2s - loss: 1.6928 - accuracy: 0.3824 - val_loss: 1.5727 - val_accuracy: 0.3949 - 2s/epoch - 31ms/step Epoch 3/50 77/77 - 2s - loss: 1.3897 - accuracy: 0.4903 - val_loss: 1.3451 - val_accuracy: 0.5280 - 2s/epoch - 31ms/step Epoch 4/50 77/77 - 2s - loss: 1.1807 - accuracy: 0.5745 - val_loss: 1.2738 - val_accuracy: 0.5584 - 2s/epoch - 31ms/step Epoch 5/50 77/77 - 2s - loss: 1.0721 - accuracy: 0.6181 - val_loss: 1.1801 - val_accuracy: 0.6121 - 2s/epoch - 31ms/step Epoch 6/50 77/77 - 2s - loss: 0.9368 - accuracy: 0.6769 - val_loss: 1.1836 - val_accuracy: 0.5911 - 2s/epoch - 31ms/step Epoch 7/50 77/77 - 2s - loss: 0.8073 - accuracy: 0.7208 - val_loss: 1.2029 - val_accuracy: 0.6098 - 2s/epoch - 31ms/step Epoch 8/50 77/77 - 2s - loss: 0.6621 - accuracy: 0.7746 - val_loss: 1.0675 - val_accuracy: 0.6822 - 2s/epoch - 31ms/step Epoch 9/50 77/77 - 2s - loss: 0.5488 - accuracy: 0.8089 - val_loss: 1.0082 - val_accuracy: 0.7173 - 2s/epoch - 31ms/step Epoch 10/50 77/77 - 2s - loss: 0.4649 - accuracy: 0.8459 - val_loss: 1.0116 - val_accuracy: 0.7079 - 2s/epoch - 31ms/step Epoch 11/50 77/77 - 2s - loss: 0.4349 - accuracy: 0.8518 - val_loss: 1.0349 - val_accuracy: 0.7243 - 2s/epoch - 31ms/step Epoch 12/50 77/77 - 2s - loss: 0.3803 - accuracy: 0.8695 - val_loss: 1.1288 - val_accuracy: 0.7196 - 2s/epoch - 31ms/step Epoch 13/50 77/77 - 2s - loss: 0.3313 - accuracy: 0.8869 - val_loss: 1.1797 - val_accuracy: 0.6893 - 2s/epoch - 31ms/step Epoch 14/50 77/77 - 2s - loss: 0.2580 - accuracy: 0.9124 - val_loss: 1.1924 - val_accuracy: 0.7243 - 2s/epoch - 31ms/step Epoch 15/50 77/77 - 2s - loss: 0.2173 - accuracy: 0.9194 - val_loss: 1.1676 - val_accuracy: 0.7407 - 2s/epoch - 31ms/step Epoch 16/50 77/77 - 2s - loss: 0.1867 - accuracy: 0.9374 - val_loss: 1.0888 - val_accuracy: 0.7617 - 2s/epoch - 31ms/step Epoch 17/50 77/77 - 2s - loss: 0.1501 - accuracy: 
0.9504 - val_loss: 1.2759 - val_accuracy: 0.7079 - 2s/epoch - 31ms/step Epoch 18/50 77/77 - 2s - loss: 0.1152 - accuracy: 0.9620 - val_loss: 1.2993 - val_accuracy: 0.7500 - 2s/epoch - 32ms/step Epoch 19/50 77/77 - 2s - loss: 0.1190 - accuracy: 0.9548 - val_loss: 1.4037 - val_accuracy: 0.7360 - 2s/epoch - 31ms/step Epoch 20/50 77/77 - 2s - loss: 0.0807 - accuracy: 0.9745 - val_loss: 1.3861 - val_accuracy: 0.7290 - 2s/epoch - 31ms/step Epoch 21/50 77/77 - 2s - loss: 0.0556 - accuracy: 0.9826 - val_loss: 1.4163 - val_accuracy: 0.7383 - 2s/epoch - 32ms/step Epoch 22/50 77/77 - 2s - loss: 0.0504 - accuracy: 0.9839 - val_loss: 1.5387 - val_accuracy: 0.7103 - 2s/epoch - 31ms/step Epoch 23/50 77/77 - 2s - loss: 0.0575 - accuracy: 0.9800 - val_loss: 1.7358 - val_accuracy: 0.6986 - 2s/epoch - 32ms/step Epoch 24/50 77/77 - 2s - loss: 0.0645 - accuracy: 0.9766 - val_loss: 1.5966 - val_accuracy: 0.7430 - 2s/epoch - 31ms/step Epoch 25/50 77/77 - 2s - loss: 0.0560 - accuracy: 0.9870 - val_loss: 1.8412 - val_accuracy: 0.6846 - 2s/epoch - 31ms/step Epoch 26/50 77/77 - 2s - loss: 0.0689 - accuracy: 0.9769 - val_loss: 1.6242 - val_accuracy: 0.7500 - 2s/epoch - 31ms/step Epoch 27/50 77/77 - 2s - loss: 0.0299 - accuracy: 0.9919 - val_loss: 1.7098 - val_accuracy: 0.7383 - 2s/epoch - 31ms/step Epoch 28/50 77/77 - 2s - loss: 0.0628 - accuracy: 0.9792 - val_loss: 2.0160 - val_accuracy: 0.6869 - 2s/epoch - 31ms/step Epoch 29/50 77/77 - 2s - loss: 0.1412 - accuracy: 0.9504 - val_loss: 1.7026 - val_accuracy: 0.6963 - 2s/epoch - 31ms/step Epoch 30/50 77/77 - 2s - loss: 0.0348 - accuracy: 0.9896 - val_loss: 1.6733 - val_accuracy: 0.7383 - 2s/epoch - 32ms/step Epoch 31/50 77/77 - 2s - loss: 0.0121 - accuracy: 0.9982 - val_loss: 1.7460 - val_accuracy: 0.7500 - 2s/epoch - 31ms/step Epoch 32/50 77/77 - 2s - loss: 0.0059 - accuracy: 0.9995 - val_loss: 1.8167 - val_accuracy: 0.7477 - 2s/epoch - 32ms/step Epoch 33/50 77/77 - 2s - loss: 0.0038 - accuracy: 1.0000 - val_loss: 1.8397 - val_accuracy: 
0.7453 - 2s/epoch - 31ms/step Epoch 34/50 77/77 - 2s - loss: 0.0031 - accuracy: 1.0000 - val_loss: 1.8952 - val_accuracy: 0.7500 - 2s/epoch - 31ms/step Epoch 35/50 77/77 - 2s - loss: 0.0024 - accuracy: 1.0000 - val_loss: 1.9336 - val_accuracy: 0.7453 - 2s/epoch - 32ms/step Epoch 36/50 77/77 - 2s - loss: 0.0022 - accuracy: 1.0000 - val_loss: 1.9575 - val_accuracy: 0.7453 - 2s/epoch - 32ms/step Epoch 37/50 77/77 - 3s - loss: 0.0018 - accuracy: 1.0000 - val_loss: 1.9808 - val_accuracy: 0.7453 - 3s/epoch - 33ms/step Epoch 38/50 77/77 - 2s - loss: 0.0016 - accuracy: 1.0000 - val_loss: 2.0010 - val_accuracy: 0.7453 - 2s/epoch - 32ms/step Epoch 39/50 77/77 - 2s - loss: 0.0015 - accuracy: 1.0000 - val_loss: 2.0380 - val_accuracy: 0.7453 - 2s/epoch - 31ms/step Epoch 40/50 77/77 - 2s - loss: 0.0013 - accuracy: 1.0000 - val_loss: 2.0738 - val_accuracy: 0.7453 - 2s/epoch - 32ms/step Epoch 41/50 77/77 - 2s - loss: 0.0011 - accuracy: 1.0000 - val_loss: 2.0776 - val_accuracy: 0.7453 - 2s/epoch - 32ms/step Epoch 42/50 77/77 - 2s - loss: 0.0010 - accuracy: 1.0000 - val_loss: 2.1199 - val_accuracy: 0.7477 - 2s/epoch - 32ms/step Epoch 43/50 77/77 - 2s - loss: 9.3207e-04 - accuracy: 1.0000 - val_loss: 2.1357 - val_accuracy: 0.7453 - 2s/epoch - 32ms/step Epoch 44/50 77/77 - 2s - loss: 8.6076e-04 - accuracy: 1.0000 - val_loss: 2.1517 - val_accuracy: 0.7477 - 2s/epoch - 32ms/step Epoch 45/50 77/77 - 2s - loss: 7.8639e-04 - accuracy: 1.0000 - val_loss: 2.1800 - val_accuracy: 0.7477 - 2s/epoch - 32ms/step Epoch 46/50 77/77 - 2s - loss: 7.3427e-04 - accuracy: 1.0000 - val_loss: 2.1900 - val_accuracy: 0.7477 - 2s/epoch - 32ms/step Epoch 47/50 77/77 - 2s - loss: 6.7935e-04 - accuracy: 1.0000 - val_loss: 2.2172 - val_accuracy: 0.7477 - 2s/epoch - 32ms/step Epoch 48/50 77/77 - 2s - loss: 6.4884e-04 - accuracy: 1.0000 - val_loss: 2.2558 - val_accuracy: 0.7500 - 2s/epoch - 31ms/step Epoch 49/50 77/77 - 2s - loss: 5.8926e-04 - accuracy: 1.0000 - val_loss: 2.2428 - val_accuracy: 0.7477 - 2s/epoch - 
32ms/step Epoch 50/50 77/77 - 2s - loss: 5.4974e-04 - accuracy: 1.0000 - val_loss: 2.2728 - val_accuracy: 0.7453 - 2s/epoch - 32ms/step
# Train vs. validation accuracy across epochs
for curve in ('accuracy', 'val_accuracy'):
    plt.plot(history_1.history[curve])
plt.title('Model Accuracy')
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(['Train', 'Validation'], loc='upper left')
plt.show()
Observations:
# Training-split loss/accuracy, then the full metric table.
# NOTE(review): perfect (1.0) training accuracy alongside ~0.75 validation
# accuracy (see outputs below) indicates the model has overfit.
model.evaluate(X_train_normalized, y_train_encoded, verbose=1)
model0_TrainDF = model_performance_classification_sklearn_with_threshold(
model, X_train_normalized, y_train_encoded,
)
model0_TrainDF
121/121 [==============================] - 1s 10ms/step - loss: 5.0907e-04 - accuracy: 1.0000 121/121 [==============================] - 1s 7ms/step
| Accuracy | Recall | Precision | F1 | |
|---|---|---|---|---|
| 0 | 1.0 | 1.0 | 1.0 | 1.0 |
Observations
# Validation-split loss/accuracy and metric table
model.evaluate(X_val_normalized, y_val_encoded, verbose=1)
model0_ValDF = model_performance_classification_sklearn_with_threshold(
model, X_val_normalized,y_val_encoded,
)
model0_ValDF
14/14 [==============================] - 0s 19ms/step - loss: 2.2728 - accuracy: 0.7453 14/14 [==============================] - 0s 7ms/step
| Accuracy | Recall | Precision | F1 | |
|---|---|---|---|---|
| 0 | 0.745327 | 0.745327 | 0.736978 | 0.738901 |
Observations
# Final held-out test-set loss/accuracy and metric table
model.evaluate(X_test_normalized, y_test_encoded, verbose=2)
model0_TestDF = model_performance_classification_sklearn_with_threshold(model, X_test_normalized, y_test_encoded)
model0_TestDF
15/15 - 0s - loss: 2.4591 - accuracy: 0.7326 - 317ms/epoch - 21ms/step 15/15 [==============================] - 0s 7ms/step
| Accuracy | Recall | Precision | F1 | |
|---|---|---|---|---|
| 0 | 0.732632 | 0.732632 | 0.729495 | 0.72711 |
Observations:
# Visualizing the predicted and correct label of images from test data
rows = 1
cols = 5
for i in range(cols):
    for j in range(rows):
        # Sample from the entire test set. The original hard-coded
        # randint(0, 400), which silently excluded the last 75 of the
        # 475 test samples.
        random_index = np.random.randint(0, len(X_test))
        plt.imshow(X_test[random_index])
        plt.show()
        print('Predicted Label', enc.inverse_transform(model.predict((X_test_normalized[random_index].reshape(1,64,64,3)))))
        print('True Label', enc.inverse_transform(y_test_encoded)[random_index])
1/1 [==============================] - 0s 103ms/step Predicted Label ['Sugar beet'] True Label Sugar beet
1/1 [==============================] - 0s 16ms/step Predicted Label ['Loose Silky-bent'] True Label Loose Silky-bent
1/1 [==============================] - 0s 16ms/step Predicted Label ['Loose Silky-bent'] True Label Loose Silky-bent
1/1 [==============================] - 0s 17ms/step Predicted Label ['Scentless Mayweed'] True Label Scentless Mayweed
1/1 [==============================] - 0s 22ms/step Predicted Label ['Small-flowered Cranesbill'] True Label Small-flowered Cranesbill
Inference:
# Clearing backend
backend.clear_session()
# Fixing the seed for random number generators
import random
np.random.seed(42)
random.seed(42)
tf.random.set_seed(42)
# Initializing a sequential model
model = Sequential()
# First conv layer: 64 filters, 3x3 kernel; padding 'same' keeps the output size equal to the input size
# input_shape is the 64x64 RGB image dimension
model.add(Conv2D(64, (3, 3), activation='relu', padding="same", input_shape=(64, 64, 3)))
# Adding max pooling to reduce the size of output of first conv layer
model.add(MaxPooling2D(2, 2))
model.add(BatchNormalization())
model.add(Conv2D(64, (3, 3), activation='relu', padding="same"))
model.add(BatchNormalization())
model.add(MaxPooling2D(2, 2))
# NOTE(review): this Dense(32) is applied to a 4-D feature map, so it acts
# pointwise on the channel axis at every spatial location (the model summary
# shows output (None, 16, 16, 32)) — an unusual choice between conv layers.
model.add(Dense(32, activation='relu'))
model.add(Conv2D(32, (3, 3), activation='relu', padding="same"))
model.add(MaxPooling2D(2, 2))
model.add(Dropout(0.3))
model.add(Conv2D(32, (3, 3), activation='relu', padding="same"))
# Second pointwise Dense(32) on a 4-D feature map (same caveat as above)
model.add(Dense(32, activation='relu'))
model.add(MaxPooling2D(2, 2))
model.add(Dropout(0.3))
model.add(Conv2D(32, (3, 3), activation='relu', padding="same"))
model.add(MaxPooling2D(2, 2))
model.add(Conv2D(32, (3, 3), activation='relu', padding="same"))
model.add(MaxPooling2D(2, 2))
model.add(BatchNormalization())
# flattening the output of the conv layer after max pooling to make it ready for creating dense connections
model.add(Flatten())
# Fully connected dense layer with 64 neurons (earlier comment said 100)
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.3))
# Fully connected dense layer with 32 neurons
model.add(Dense(32, activation='relu'))
model.add(Dropout(0.3))
model.add(Dense(32, activation='relu'))
# Adding the output layer with 12 neurons and activation functions as softmax since this is a multi-class classification problem
model.add(Dense(12, activation='softmax'))
# Using Adam Optimizer
opt=Adam()
# Compile model
model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])
# Generating the summary of the model
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 64) 1792
max_pooling2d (MaxPooling2D (None, 32, 32, 64) 0
)
batch_normalization (BatchN (None, 32, 32, 64) 256
ormalization)
conv2d_1 (Conv2D) (None, 32, 32, 64) 36928
batch_normalization_1 (Batc (None, 32, 32, 64) 256
hNormalization)
max_pooling2d_1 (MaxPooling (None, 16, 16, 64) 0
2D)
dense (Dense) (None, 16, 16, 32) 2080
conv2d_2 (Conv2D) (None, 16, 16, 32) 9248
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout (Dropout) (None, 8, 8, 32) 0
conv2d_3 (Conv2D) (None, 8, 8, 32) 9248
dense_1 (Dense) (None, 8, 8, 32) 1056
max_pooling2d_3 (MaxPooling (None, 4, 4, 32) 0
2D)
dropout_1 (Dropout) (None, 4, 4, 32) 0
conv2d_4 (Conv2D) (None, 4, 4, 32) 9248
max_pooling2d_4 (MaxPooling (None, 2, 2, 32) 0
2D)
conv2d_5 (Conv2D) (None, 2, 2, 32) 9248
max_pooling2d_5 (MaxPooling (None, 1, 1, 32) 0
2D)
batch_normalization_2 (Batc (None, 1, 1, 32) 128
hNormalization)
flatten (Flatten) (None, 32) 0
dense_2 (Dense) (None, 64) 2112
dropout_2 (Dropout) (None, 64) 0
dense_3 (Dense) (None, 32) 2080
dropout_3 (Dropout) (None, 32) 0
dense_4 (Dense) (None, 32) 1056
dense_5 (Dense) (None, 12) 396
=================================================================
Total params: 85,132
Trainable params: 84,812
Non-trainable params: 320
_________________________________________________________________
Observations:
# Train model 2 for 200 epochs with the balanced class weights.
# NOTE(review): no EarlyStopping/ModelCheckpoint callback is passed, so the
# final weights are whatever epoch 200 produced, regardless of the best
# validation score seen during training.
history_1 = model.fit(
X_train_normalized, y_train_encoded,
epochs=200,
validation_data=(X_val_normalized,y_val_encoded),
batch_size=50,
verbose=2,
class_weight = class_weights,
)
Epoch 1/200 77/77 - 4s - loss: 2.5024 - accuracy: 0.0876 - val_loss: 2.4939 - val_accuracy: 0.0467 - 4s/epoch - 48ms/step Epoch 2/200 77/77 - 1s - loss: 2.3346 - accuracy: 0.1432 - val_loss: 2.8157 - val_accuracy: 0.1332 - 1s/epoch - 15ms/step Epoch 3/200 77/77 - 1s - loss: 1.9805 - accuracy: 0.2441 - val_loss: 4.1275 - val_accuracy: 0.1379 - 1s/epoch - 15ms/step Epoch 4/200 77/77 - 1s - loss: 1.7452 - accuracy: 0.3091 - val_loss: 3.0697 - val_accuracy: 0.0654 - 1s/epoch - 15ms/step Epoch 5/200 77/77 - 1s - loss: 1.6754 - accuracy: 0.3442 - val_loss: 4.2307 - val_accuracy: 0.0584 - 1s/epoch - 15ms/step Epoch 6/200 77/77 - 1s - loss: 1.5965 - accuracy: 0.3717 - val_loss: 2.8036 - val_accuracy: 0.1939 - 1s/epoch - 15ms/step Epoch 7/200 77/77 - 1s - loss: 1.5088 - accuracy: 0.4047 - val_loss: 2.0689 - val_accuracy: 0.2921 - 1s/epoch - 15ms/step Epoch 8/200 77/77 - 1s - loss: 1.4395 - accuracy: 0.4409 - val_loss: 1.6932 - val_accuracy: 0.3785 - 1s/epoch - 15ms/step Epoch 9/200 77/77 - 1s - loss: 1.4033 - accuracy: 0.4435 - val_loss: 4.4969 - val_accuracy: 0.0794 - 1s/epoch - 15ms/step Epoch 10/200 77/77 - 1s - loss: 1.3644 - accuracy: 0.4549 - val_loss: 2.2258 - val_accuracy: 0.3037 - 1s/epoch - 15ms/step Epoch 11/200 77/77 - 1s - loss: 1.3183 - accuracy: 0.4760 - val_loss: 1.6239 - val_accuracy: 0.4252 - 1s/epoch - 15ms/step Epoch 12/200 77/77 - 1s - loss: 1.2908 - accuracy: 0.4936 - val_loss: 2.6793 - val_accuracy: 0.2220 - 1s/epoch - 15ms/step Epoch 13/200 77/77 - 1s - loss: 1.2478 - accuracy: 0.5188 - val_loss: 1.8548 - val_accuracy: 0.4322 - 1s/epoch - 15ms/step Epoch 14/200 77/77 - 1s - loss: 1.1699 - accuracy: 0.5537 - val_loss: 1.9590 - val_accuracy: 0.3551 - 1s/epoch - 15ms/step Epoch 15/200 77/77 - 1s - loss: 1.1976 - accuracy: 0.5532 - val_loss: 1.0908 - val_accuracy: 0.5561 - 1s/epoch - 15ms/step Epoch 16/200 77/77 - 1s - loss: 1.1502 - accuracy: 0.5623 - val_loss: 1.2331 - val_accuracy: 0.5257 - 1s/epoch - 16ms/step Epoch 17/200 77/77 - 1s - loss: 1.1101 - 
accuracy: 0.5711 - val_loss: 1.2543 - val_accuracy: 0.4977 - 1s/epoch - 15ms/step Epoch 18/200 77/77 - 1s - loss: 1.1053 - accuracy: 0.5703 - val_loss: 1.1487 - val_accuracy: 0.6028 - 1s/epoch - 15ms/step Epoch 19/200 77/77 - 1s - loss: 1.0982 - accuracy: 0.5950 - val_loss: 2.0382 - val_accuracy: 0.4042 - 1s/epoch - 15ms/step Epoch 20/200 77/77 - 1s - loss: 1.0326 - accuracy: 0.6218 - val_loss: 1.7301 - val_accuracy: 0.4579 - 1s/epoch - 15ms/step Epoch 21/200 77/77 - 1s - loss: 1.0022 - accuracy: 0.6369 - val_loss: 0.9595 - val_accuracy: 0.6612 - 1s/epoch - 15ms/step Epoch 22/200 77/77 - 1s - loss: 0.9375 - accuracy: 0.6356 - val_loss: 1.2293 - val_accuracy: 0.5911 - 1s/epoch - 15ms/step Epoch 23/200 77/77 - 1s - loss: 0.9104 - accuracy: 0.6551 - val_loss: 1.2346 - val_accuracy: 0.5584 - 1s/epoch - 15ms/step Epoch 24/200 77/77 - 1s - loss: 0.9008 - accuracy: 0.6579 - val_loss: 0.8256 - val_accuracy: 0.7430 - 1s/epoch - 15ms/step Epoch 25/200 77/77 - 1s - loss: 0.8675 - accuracy: 0.6795 - val_loss: 1.7327 - val_accuracy: 0.5491 - 1s/epoch - 15ms/step Epoch 26/200 77/77 - 1s - loss: 0.8419 - accuracy: 0.6800 - val_loss: 0.8968 - val_accuracy: 0.7079 - 1s/epoch - 15ms/step Epoch 27/200 77/77 - 1s - loss: 0.8221 - accuracy: 0.7024 - val_loss: 1.2312 - val_accuracy: 0.6215 - 1s/epoch - 15ms/step Epoch 28/200 77/77 - 1s - loss: 0.8289 - accuracy: 0.6953 - val_loss: 1.2880 - val_accuracy: 0.6238 - 1s/epoch - 15ms/step Epoch 29/200 77/77 - 1s - loss: 0.7696 - accuracy: 0.7070 - val_loss: 1.4673 - val_accuracy: 0.5818 - 1s/epoch - 16ms/step Epoch 30/200 77/77 - 1s - loss: 0.7763 - accuracy: 0.7224 - val_loss: 1.4805 - val_accuracy: 0.5701 - 1s/epoch - 15ms/step Epoch 31/200 77/77 - 1s - loss: 0.7190 - accuracy: 0.7289 - val_loss: 1.0720 - val_accuracy: 0.5935 - 1s/epoch - 15ms/step Epoch 32/200 77/77 - 1s - loss: 0.7098 - accuracy: 0.7393 - val_loss: 0.7571 - val_accuracy: 0.7850 - 1s/epoch - 15ms/step Epoch 33/200 77/77 - 1s - loss: 0.7107 - accuracy: 0.7234 - val_loss: 
1.0457 - val_accuracy: 0.6939 - 1s/epoch - 15ms/step Epoch 34/200 77/77 - 1s - loss: 0.6882 - accuracy: 0.7421 - val_loss: 0.7006 - val_accuracy: 0.7383 - 1s/epoch - 15ms/step Epoch 35/200 77/77 - 1s - loss: 0.6489 - accuracy: 0.7463 - val_loss: 1.3067 - val_accuracy: 0.6028 - 1s/epoch - 15ms/step Epoch 36/200 77/77 - 1s - loss: 0.6466 - accuracy: 0.7554 - val_loss: 1.4088 - val_accuracy: 0.6098 - 1s/epoch - 15ms/step Epoch 37/200 77/77 - 1s - loss: 0.6449 - accuracy: 0.7702 - val_loss: 0.9264 - val_accuracy: 0.6752 - 1s/epoch - 15ms/step Epoch 38/200 77/77 - 1s - loss: 0.6389 - accuracy: 0.7668 - val_loss: 1.4368 - val_accuracy: 0.5911 - 1s/epoch - 15ms/step Epoch 39/200 77/77 - 1s - loss: 0.5832 - accuracy: 0.7842 - val_loss: 1.7026 - val_accuracy: 0.5444 - 1s/epoch - 15ms/step Epoch 40/200 77/77 - 1s - loss: 0.5917 - accuracy: 0.7814 - val_loss: 0.7423 - val_accuracy: 0.7664 - 1s/epoch - 15ms/step Epoch 41/200 77/77 - 1s - loss: 0.5714 - accuracy: 0.7881 - val_loss: 1.3741 - val_accuracy: 0.5514 - 1s/epoch - 15ms/step Epoch 42/200 77/77 - 1s - loss: 0.5857 - accuracy: 0.7790 - val_loss: 1.8612 - val_accuracy: 0.5023 - 1s/epoch - 15ms/step Epoch 43/200 77/77 - 1s - loss: 0.5926 - accuracy: 0.7819 - val_loss: 0.7455 - val_accuracy: 0.7617 - 1s/epoch - 15ms/step Epoch 44/200 77/77 - 1s - loss: 0.5561 - accuracy: 0.8022 - val_loss: 1.0182 - val_accuracy: 0.7290 - 1s/epoch - 15ms/step Epoch 45/200 77/77 - 1s - loss: 0.5458 - accuracy: 0.7985 - val_loss: 0.8161 - val_accuracy: 0.6986 - 1s/epoch - 15ms/step Epoch 46/200 77/77 - 1s - loss: 0.5195 - accuracy: 0.8089 - val_loss: 0.8260 - val_accuracy: 0.7453 - 1s/epoch - 15ms/step Epoch 47/200 77/77 - 1s - loss: 0.5361 - accuracy: 0.8040 - val_loss: 0.6681 - val_accuracy: 0.8061 - 1s/epoch - 15ms/step Epoch 48/200 77/77 - 1s - loss: 0.4772 - accuracy: 0.8225 - val_loss: 0.7559 - val_accuracy: 0.7967 - 1s/epoch - 15ms/step Epoch 49/200 77/77 - 1s - loss: 0.4931 - accuracy: 0.8193 - val_loss: 1.0998 - val_accuracy: 0.6822 - 
1s/epoch - 15ms/step Epoch 50/200 77/77 - 1s - loss: 0.5156 - accuracy: 0.8121 - val_loss: 0.6803 - val_accuracy: 0.7804 - 1s/epoch - 15ms/step Epoch 51/200 77/77 - 1s - loss: 0.4929 - accuracy: 0.8128 - val_loss: 1.1960 - val_accuracy: 0.6752 - 1s/epoch - 15ms/step Epoch 52/200 77/77 - 1s - loss: 0.4695 - accuracy: 0.8274 - val_loss: 1.7797 - val_accuracy: 0.6168 - 1s/epoch - 15ms/step Epoch 53/200 77/77 - 1s - loss: 0.4665 - accuracy: 0.8284 - val_loss: 0.8017 - val_accuracy: 0.7547 - 1s/epoch - 15ms/step Epoch 54/200 77/77 - 1s - loss: 0.4580 - accuracy: 0.8209 - val_loss: 0.7190 - val_accuracy: 0.7874 - 1s/epoch - 15ms/step Epoch 55/200 77/77 - 1s - loss: 0.4599 - accuracy: 0.8282 - val_loss: 0.9497 - val_accuracy: 0.7220 - 1s/epoch - 15ms/step Epoch 56/200 77/77 - 1s - loss: 0.4442 - accuracy: 0.8404 - val_loss: 1.7456 - val_accuracy: 0.5654 - 1s/epoch - 15ms/step Epoch 57/200 77/77 - 1s - loss: 0.4515 - accuracy: 0.8329 - val_loss: 0.8344 - val_accuracy: 0.7383 - 1s/epoch - 15ms/step Epoch 58/200 77/77 - 1s - loss: 0.4519 - accuracy: 0.8420 - val_loss: 0.8893 - val_accuracy: 0.7220 - 1s/epoch - 15ms/step Epoch 59/200 77/77 - 1s - loss: 0.4239 - accuracy: 0.8469 - val_loss: 1.9049 - val_accuracy: 0.4930 - 1s/epoch - 15ms/step Epoch 60/200 77/77 - 1s - loss: 0.4422 - accuracy: 0.8342 - val_loss: 1.7239 - val_accuracy: 0.6472 - 1s/epoch - 15ms/step Epoch 61/200 77/77 - 1s - loss: 0.4111 - accuracy: 0.8495 - val_loss: 0.8014 - val_accuracy: 0.7617 - 1s/epoch - 15ms/step Epoch 62/200 77/77 - 1s - loss: 0.4278 - accuracy: 0.8425 - val_loss: 0.6305 - val_accuracy: 0.7991 - 1s/epoch - 15ms/step Epoch 63/200 77/77 - 1s - loss: 0.4498 - accuracy: 0.8427 - val_loss: 1.1664 - val_accuracy: 0.6986 - 1s/epoch - 15ms/step Epoch 64/200 77/77 - 1s - loss: 0.4274 - accuracy: 0.8485 - val_loss: 1.4136 - val_accuracy: 0.5981 - 1s/epoch - 15ms/step Epoch 65/200 77/77 - 1s - loss: 0.4075 - accuracy: 0.8586 - val_loss: 0.6253 - val_accuracy: 0.8248 - 1s/epoch - 15ms/step Epoch 
66/200 77/77 - 1s - loss: 0.4347 - accuracy: 0.8537 - val_loss: 1.9132 - val_accuracy: 0.5584 - 1s/epoch - 15ms/step Epoch 67/200 77/77 - 1s - loss: 0.3731 - accuracy: 0.8594 - val_loss: 1.2162 - val_accuracy: 0.6986 - 1s/epoch - 15ms/step Epoch 68/200 77/77 - 1s - loss: 0.3904 - accuracy: 0.8591 - val_loss: 0.7103 - val_accuracy: 0.7874 - 1s/epoch - 15ms/step Epoch 69/200 77/77 - 1s - loss: 0.3966 - accuracy: 0.8602 - val_loss: 1.4731 - val_accuracy: 0.6215 - 1s/epoch - 15ms/step Epoch 70/200 77/77 - 1s - loss: 0.3980 - accuracy: 0.8602 - val_loss: 0.6154 - val_accuracy: 0.8505 - 1s/epoch - 15ms/step Epoch 71/200 77/77 - 1s - loss: 0.3915 - accuracy: 0.8628 - val_loss: 0.6897 - val_accuracy: 0.8084 - 1s/epoch - 15ms/step Epoch 72/200 77/77 - 1s - loss: 0.3548 - accuracy: 0.8656 - val_loss: 0.6817 - val_accuracy: 0.8107 - 1s/epoch - 15ms/step Epoch 73/200 77/77 - 1s - loss: 0.3566 - accuracy: 0.8721 - val_loss: 1.6673 - val_accuracy: 0.5771 - 1s/epoch - 15ms/step Epoch 74/200 77/77 - 1s - loss: 0.3782 - accuracy: 0.8602 - val_loss: 0.6042 - val_accuracy: 0.8271 - 1s/epoch - 15ms/step Epoch 75/200 77/77 - 1s - loss: 0.3679 - accuracy: 0.8705 - val_loss: 1.2240 - val_accuracy: 0.6472 - 1s/epoch - 15ms/step Epoch 76/200 77/77 - 1s - loss: 0.3430 - accuracy: 0.8789 - val_loss: 0.7519 - val_accuracy: 0.7827 - 1s/epoch - 15ms/step Epoch 77/200 77/77 - 1s - loss: 0.3494 - accuracy: 0.8765 - val_loss: 1.9834 - val_accuracy: 0.5537 - 1s/epoch - 15ms/step Epoch 78/200 77/77 - 1s - loss: 0.3077 - accuracy: 0.8867 - val_loss: 0.9887 - val_accuracy: 0.7383 - 1s/epoch - 15ms/step Epoch 79/200 77/77 - 1s - loss: 0.3367 - accuracy: 0.8812 - val_loss: 1.3043 - val_accuracy: 0.6355 - 1s/epoch - 15ms/step Epoch 80/200 77/77 - 1s - loss: 0.3372 - accuracy: 0.8752 - val_loss: 0.6806 - val_accuracy: 0.8154 - 1s/epoch - 15ms/step Epoch 81/200 77/77 - 1s - loss: 0.3050 - accuracy: 0.8898 - val_loss: 0.6316 - val_accuracy: 0.8388 - 1s/epoch - 15ms/step Epoch 82/200 77/77 - 1s - loss: 
0.3155 - accuracy: 0.8838 - val_loss: 0.5322 - val_accuracy: 0.8411 - 1s/epoch - 15ms/step Epoch 83/200 77/77 - 1s - loss: 0.3390 - accuracy: 0.8791 - val_loss: 1.0878 - val_accuracy: 0.7640 - 1s/epoch - 15ms/step Epoch 84/200 77/77 - 1s - loss: 0.3305 - accuracy: 0.8802 - val_loss: 0.6812 - val_accuracy: 0.8037 - 1s/epoch - 15ms/step Epoch 85/200 77/77 - 1s - loss: 0.3236 - accuracy: 0.8828 - val_loss: 4.2939 - val_accuracy: 0.4019 - 1s/epoch - 15ms/step Epoch 86/200 77/77 - 1s - loss: 0.3104 - accuracy: 0.8841 - val_loss: 0.7972 - val_accuracy: 0.8014 - 1s/epoch - 15ms/step Epoch 87/200 77/77 - 1s - loss: 0.3623 - accuracy: 0.8768 - val_loss: 3.0438 - val_accuracy: 0.4836 - 1s/epoch - 15ms/step Epoch 88/200 77/77 - 1s - loss: 0.2966 - accuracy: 0.8986 - val_loss: 0.7261 - val_accuracy: 0.7967 - 1s/epoch - 15ms/step Epoch 89/200 77/77 - 1s - loss: 0.2962 - accuracy: 0.8963 - val_loss: 1.0824 - val_accuracy: 0.7196 - 1s/epoch - 15ms/step Epoch 90/200 77/77 - 1s - loss: 0.2904 - accuracy: 0.8960 - val_loss: 0.6361 - val_accuracy: 0.8037 - 1s/epoch - 15ms/step Epoch 91/200 77/77 - 1s - loss: 0.2874 - accuracy: 0.8999 - val_loss: 0.8551 - val_accuracy: 0.7593 - 1s/epoch - 15ms/step Epoch 92/200 77/77 - 1s - loss: 0.2576 - accuracy: 0.9051 - val_loss: 1.0756 - val_accuracy: 0.7243 - 1s/epoch - 15ms/step Epoch 93/200 77/77 - 1s - loss: 0.2972 - accuracy: 0.9030 - val_loss: 1.5211 - val_accuracy: 0.6636 - 1s/epoch - 15ms/step Epoch 94/200 77/77 - 1s - loss: 0.2781 - accuracy: 0.8997 - val_loss: 1.7961 - val_accuracy: 0.6238 - 1s/epoch - 15ms/step Epoch 95/200 77/77 - 1s - loss: 0.2750 - accuracy: 0.9038 - val_loss: 1.7117 - val_accuracy: 0.6215 - 1s/epoch - 15ms/step Epoch 96/200 77/77 - 1s - loss: 0.2730 - accuracy: 0.9085 - val_loss: 0.6396 - val_accuracy: 0.8154 - 1s/epoch - 15ms/step Epoch 97/200 77/77 - 1s - loss: 0.2839 - accuracy: 0.8991 - val_loss: 0.7160 - val_accuracy: 0.8248 - 1s/epoch - 15ms/step Epoch 98/200 77/77 - 1s - loss: 0.2655 - accuracy: 0.9114 - 
val_loss: 0.9428 - val_accuracy: 0.7687 - 1s/epoch - 15ms/step Epoch 99/200 77/77 - 1s - loss: 0.2503 - accuracy: 0.9127 - val_loss: 0.7036 - val_accuracy: 0.8248 - 1s/epoch - 15ms/step Epoch 100/200 77/77 - 1s - loss: 0.2886 - accuracy: 0.9015 - val_loss: 0.5662 - val_accuracy: 0.8621 - 1s/epoch - 15ms/step Epoch 101/200 77/77 - 1s - loss: 0.2946 - accuracy: 0.9012 - val_loss: 0.7684 - val_accuracy: 0.7921 - 1s/epoch - 15ms/step Epoch 102/200 77/77 - 1s - loss: 0.2771 - accuracy: 0.9072 - val_loss: 1.5780 - val_accuracy: 0.6285 - 1s/epoch - 15ms/step Epoch 103/200 77/77 - 1s - loss: 0.2671 - accuracy: 0.9067 - val_loss: 1.3254 - val_accuracy: 0.7173 - 1s/epoch - 15ms/step Epoch 104/200 77/77 - 1s - loss: 0.2800 - accuracy: 0.9051 - val_loss: 0.7779 - val_accuracy: 0.7991 - 1s/epoch - 15ms/step Epoch 105/200 77/77 - 1s - loss: 0.2565 - accuracy: 0.9127 - val_loss: 0.6432 - val_accuracy: 0.8318 - 1s/epoch - 15ms/step Epoch 106/200 77/77 - 1s - loss: 0.2405 - accuracy: 0.9228 - val_loss: 0.8439 - val_accuracy: 0.8084 - 1s/epoch - 15ms/step Epoch 107/200 77/77 - 1s - loss: 0.2205 - accuracy: 0.9233 - val_loss: 0.5514 - val_accuracy: 0.8411 - 1s/epoch - 15ms/step Epoch 108/200 77/77 - 1s - loss: 0.2541 - accuracy: 0.9192 - val_loss: 0.4373 - val_accuracy: 0.8738 - 1s/epoch - 15ms/step Epoch 109/200 77/77 - 1s - loss: 0.2216 - accuracy: 0.9181 - val_loss: 1.0460 - val_accuracy: 0.7220 - 1s/epoch - 15ms/step Epoch 110/200 77/77 - 1s - loss: 0.2313 - accuracy: 0.9171 - val_loss: 1.4558 - val_accuracy: 0.6986 - 1s/epoch - 15ms/step Epoch 111/200 77/77 - 1s - loss: 0.2573 - accuracy: 0.9114 - val_loss: 2.7137 - val_accuracy: 0.5748 - 1s/epoch - 15ms/step Epoch 112/200 77/77 - 1s - loss: 0.2640 - accuracy: 0.9116 - val_loss: 0.8822 - val_accuracy: 0.7757 - 1s/epoch - 15ms/step Epoch 113/200 77/77 - 1s - loss: 0.2680 - accuracy: 0.9108 - val_loss: 0.4436 - val_accuracy: 0.9042 - 1s/epoch - 15ms/step Epoch 114/200 77/77 - 1s - loss: 0.2383 - accuracy: 0.9158 - val_loss: 1.7173 
- val_accuracy: 0.6495 - 1s/epoch - 15ms/step Epoch 115/200 77/77 - 1s - loss: 0.2203 - accuracy: 0.9225 - val_loss: 0.6289 - val_accuracy: 0.8294 - 1s/epoch - 15ms/step Epoch 116/200 77/77 - 1s - loss: 0.2189 - accuracy: 0.9272 - val_loss: 0.4671 - val_accuracy: 0.8785 - 1s/epoch - 15ms/step Epoch 117/200 77/77 - 1s - loss: 0.2645 - accuracy: 0.9108 - val_loss: 0.9991 - val_accuracy: 0.7453 - 1s/epoch - 15ms/step Epoch 118/200 77/77 - 1s - loss: 0.2070 - accuracy: 0.9264 - val_loss: 0.7728 - val_accuracy: 0.8178 - 1s/epoch - 15ms/step Epoch 119/200 77/77 - 1s - loss: 0.2480 - accuracy: 0.9223 - val_loss: 0.6796 - val_accuracy: 0.8248 - 1s/epoch - 15ms/step Epoch 120/200 77/77 - 1s - loss: 0.2467 - accuracy: 0.9153 - val_loss: 1.0121 - val_accuracy: 0.7687 - 1s/epoch - 15ms/step Epoch 121/200 77/77 - 1s - loss: 0.2795 - accuracy: 0.9010 - val_loss: 0.7508 - val_accuracy: 0.8131 - 1s/epoch - 15ms/step Epoch 122/200 77/77 - 1s - loss: 0.2205 - accuracy: 0.9254 - val_loss: 0.6970 - val_accuracy: 0.8061 - 1s/epoch - 15ms/step Epoch 123/200 77/77 - 1s - loss: 0.2321 - accuracy: 0.9272 - val_loss: 0.6723 - val_accuracy: 0.7944 - 1s/epoch - 15ms/step Epoch 124/200 77/77 - 1s - loss: 0.2293 - accuracy: 0.9272 - val_loss: 0.4515 - val_accuracy: 0.8528 - 1s/epoch - 15ms/step Epoch 125/200 77/77 - 1s - loss: 0.2092 - accuracy: 0.9277 - val_loss: 0.5681 - val_accuracy: 0.8341 - 1s/epoch - 15ms/step Epoch 126/200 77/77 - 1s - loss: 0.2009 - accuracy: 0.9314 - val_loss: 0.9973 - val_accuracy: 0.7407 - 1s/epoch - 15ms/step Epoch 127/200 77/77 - 1s - loss: 0.2339 - accuracy: 0.9272 - val_loss: 1.4975 - val_accuracy: 0.6565 - 1s/epoch - 15ms/step Epoch 128/200 77/77 - 1s - loss: 0.1949 - accuracy: 0.9306 - val_loss: 0.7018 - val_accuracy: 0.8294 - 1s/epoch - 15ms/step Epoch 129/200 77/77 - 1s - loss: 0.2036 - accuracy: 0.9363 - val_loss: 0.8387 - val_accuracy: 0.8037 - 1s/epoch - 15ms/step Epoch 130/200 77/77 - 1s - loss: 0.1865 - accuracy: 0.9407 - val_loss: 0.9757 - val_accuracy: 
0.7290 - 1s/epoch - 15ms/step Epoch 131/200 77/77 - 1s - loss: 0.1874 - accuracy: 0.9379 - val_loss: 0.8847 - val_accuracy: 0.7804 - 1s/epoch - 15ms/step Epoch 132/200 77/77 - 1s - loss: 0.1997 - accuracy: 0.9335 - val_loss: 0.9550 - val_accuracy: 0.7734 - 1s/epoch - 15ms/step Epoch 133/200 77/77 - 1s - loss: 0.2434 - accuracy: 0.9270 - val_loss: 0.8541 - val_accuracy: 0.7617 - 1s/epoch - 15ms/step Epoch 134/200 77/77 - 1s - loss: 0.2299 - accuracy: 0.9301 - val_loss: 1.7481 - val_accuracy: 0.6519 - 1s/epoch - 15ms/step Epoch 135/200 77/77 - 1s - loss: 0.1749 - accuracy: 0.9441 - val_loss: 0.9120 - val_accuracy: 0.7780 - 1s/epoch - 15ms/step Epoch 136/200 77/77 - 1s - loss: 0.2010 - accuracy: 0.9337 - val_loss: 0.5975 - val_accuracy: 0.8294 - 1s/epoch - 15ms/step Epoch 137/200 77/77 - 1s - loss: 0.1928 - accuracy: 0.9358 - val_loss: 0.6672 - val_accuracy: 0.8107 - 1s/epoch - 15ms/step Epoch 138/200 77/77 - 1s - loss: 0.1978 - accuracy: 0.9353 - val_loss: 0.5261 - val_accuracy: 0.8505 - 1s/epoch - 15ms/step Epoch 139/200 77/77 - 1s - loss: 0.1645 - accuracy: 0.9480 - val_loss: 0.3613 - val_accuracy: 0.9042 - 1s/epoch - 15ms/step Epoch 140/200 77/77 - 1s - loss: 0.2189 - accuracy: 0.9345 - val_loss: 0.9176 - val_accuracy: 0.7664 - 1s/epoch - 15ms/step Epoch 141/200 77/77 - 1s - loss: 0.1797 - accuracy: 0.9452 - val_loss: 1.0065 - val_accuracy: 0.7430 - 1s/epoch - 15ms/step Epoch 142/200 77/77 - 1s - loss: 0.2018 - accuracy: 0.9363 - val_loss: 1.3790 - val_accuracy: 0.7009 - 1s/epoch - 15ms/step Epoch 143/200 77/77 - 1s - loss: 0.1925 - accuracy: 0.9348 - val_loss: 1.1297 - val_accuracy: 0.7336 - 1s/epoch - 15ms/step Epoch 144/200 77/77 - 1s - loss: 0.2022 - accuracy: 0.9358 - val_loss: 1.4766 - val_accuracy: 0.6893 - 1s/epoch - 15ms/step Epoch 145/200 77/77 - 1s - loss: 0.1750 - accuracy: 0.9485 - val_loss: 0.8584 - val_accuracy: 0.8037 - 1s/epoch - 15ms/step Epoch 146/200 77/77 - 1s - loss: 0.1716 - accuracy: 0.9407 - val_loss: 2.0207 - val_accuracy: 0.6028 - 
1s/epoch - 15ms/step Epoch 147/200 77/77 - 1s - loss: 0.1765 - accuracy: 0.9446 - val_loss: 0.8974 - val_accuracy: 0.7921 - 1s/epoch - 15ms/step Epoch 148/200 77/77 - 1s - loss: 0.2202 - accuracy: 0.9293 - val_loss: 1.2410 - val_accuracy: 0.6846 - 1s/epoch - 15ms/step Epoch 149/200 77/77 - 1s - loss: 0.1612 - accuracy: 0.9452 - val_loss: 1.3236 - val_accuracy: 0.7243 - 1s/epoch - 15ms/step Epoch 150/200 77/77 - 1s - loss: 0.1870 - accuracy: 0.9400 - val_loss: 0.7305 - val_accuracy: 0.8248 - 1s/epoch - 15ms/step Epoch 151/200 77/77 - 1s - loss: 0.1559 - accuracy: 0.9524 - val_loss: 0.4713 - val_accuracy: 0.8832 - 1s/epoch - 15ms/step Epoch 152/200 77/77 - 1s - loss: 0.1499 - accuracy: 0.9537 - val_loss: 0.4667 - val_accuracy: 0.8785 - 1s/epoch - 15ms/step Epoch 153/200 77/77 - 1s - loss: 0.1912 - accuracy: 0.9350 - val_loss: 0.9394 - val_accuracy: 0.7593 - 1s/epoch - 15ms/step Epoch 154/200 77/77 - 1s - loss: 0.1861 - accuracy: 0.9415 - val_loss: 4.0100 - val_accuracy: 0.4322 - 1s/epoch - 15ms/step Epoch 155/200 77/77 - 1s - loss: 0.2066 - accuracy: 0.9340 - val_loss: 0.7759 - val_accuracy: 0.8154 - 1s/epoch - 15ms/step Epoch 156/200 77/77 - 1s - loss: 0.2022 - accuracy: 0.9324 - val_loss: 0.7389 - val_accuracy: 0.8107 - 1s/epoch - 15ms/step Epoch 157/200 77/77 - 1s - loss: 0.1559 - accuracy: 0.9496 - val_loss: 0.4767 - val_accuracy: 0.8738 - 1s/epoch - 15ms/step Epoch 158/200 77/77 - 1s - loss: 0.1561 - accuracy: 0.9504 - val_loss: 0.6723 - val_accuracy: 0.8341 - 1s/epoch - 15ms/step Epoch 159/200 77/77 - 1s - loss: 0.1450 - accuracy: 0.9540 - val_loss: 1.8042 - val_accuracy: 0.6986 - 1s/epoch - 15ms/step Epoch 160/200 77/77 - 1s - loss: 0.1708 - accuracy: 0.9454 - val_loss: 1.3233 - val_accuracy: 0.7126 - 1s/epoch - 15ms/step Epoch 161/200 77/77 - 1s - loss: 0.1556 - accuracy: 0.9480 - val_loss: 1.2577 - val_accuracy: 0.7360 - 1s/epoch - 18ms/step Epoch 162/200 77/77 - 1s - loss: 0.1678 - accuracy: 0.9480 - val_loss: 1.1420 - val_accuracy: 0.7710 - 1s/epoch - 
19ms/step Epoch 163/200 77/77 - 1s - loss: 0.1861 - accuracy: 0.9342 - val_loss: 0.7558 - val_accuracy: 0.8411 - 1s/epoch - 18ms/step Epoch 164/200 77/77 - 1s - loss: 0.1737 - accuracy: 0.9436 - val_loss: 1.5686 - val_accuracy: 0.6589 - 1s/epoch - 15ms/step Epoch 165/200 77/77 - 1s - loss: 0.1800 - accuracy: 0.9444 - val_loss: 1.0370 - val_accuracy: 0.7710 - 1s/epoch - 15ms/step Epoch 166/200 77/77 - 1s - loss: 0.1503 - accuracy: 0.9543 - val_loss: 1.2218 - val_accuracy: 0.7593 - 1s/epoch - 15ms/step Epoch 167/200 77/77 - 1s - loss: 0.1462 - accuracy: 0.9558 - val_loss: 0.8459 - val_accuracy: 0.8131 - 1s/epoch - 15ms/step Epoch 168/200 77/77 - 1s - loss: 0.1229 - accuracy: 0.9581 - val_loss: 0.4940 - val_accuracy: 0.8925 - 1s/epoch - 15ms/step Epoch 169/200 77/77 - 1s - loss: 0.1561 - accuracy: 0.9511 - val_loss: 0.5388 - val_accuracy: 0.8715 - 1s/epoch - 15ms/step Epoch 170/200 77/77 - 1s - loss: 0.1536 - accuracy: 0.9480 - val_loss: 0.6893 - val_accuracy: 0.8505 - 1s/epoch - 15ms/step Epoch 171/200 77/77 - 1s - loss: 0.1631 - accuracy: 0.9496 - val_loss: 0.5194 - val_accuracy: 0.8855 - 1s/epoch - 15ms/step Epoch 172/200 77/77 - 1s - loss: 0.1403 - accuracy: 0.9571 - val_loss: 3.1395 - val_accuracy: 0.5981 - 1s/epoch - 15ms/step Epoch 173/200 77/77 - 1s - loss: 0.1439 - accuracy: 0.9509 - val_loss: 0.4587 - val_accuracy: 0.8785 - 1s/epoch - 16ms/step Epoch 174/200 77/77 - 1s - loss: 0.1405 - accuracy: 0.9558 - val_loss: 0.4486 - val_accuracy: 0.8879 - 1s/epoch - 15ms/step Epoch 175/200 77/77 - 1s - loss: 0.1526 - accuracy: 0.9511 - val_loss: 1.6330 - val_accuracy: 0.6682 - 1s/epoch - 15ms/step Epoch 176/200 77/77 - 1s - loss: 0.1133 - accuracy: 0.9602 - val_loss: 1.0183 - val_accuracy: 0.7944 - 1s/epoch - 15ms/step Epoch 177/200 77/77 - 1s - loss: 0.1381 - accuracy: 0.9581 - val_loss: 1.0041 - val_accuracy: 0.7850 - 1s/epoch - 15ms/step Epoch 178/200 77/77 - 1s - loss: 0.1627 - accuracy: 0.9485 - val_loss: 1.7937 - val_accuracy: 0.6682 - 1s/epoch - 15ms/step Epoch 
179/200 77/77 - 1s - loss: 0.1382 - accuracy: 0.9555 - val_loss: 1.2323 - val_accuracy: 0.7196 - 1s/epoch - 15ms/step Epoch 180/200 77/77 - 1s - loss: 0.1669 - accuracy: 0.9524 - val_loss: 0.9157 - val_accuracy: 0.7874 - 1s/epoch - 15ms/step Epoch 181/200 77/77 - 1s - loss: 0.1320 - accuracy: 0.9592 - val_loss: 0.5499 - val_accuracy: 0.8715 - 1s/epoch - 15ms/step Epoch 182/200 77/77 - 1s - loss: 0.1430 - accuracy: 0.9600 - val_loss: 0.5914 - val_accuracy: 0.8528 - 1s/epoch - 15ms/step Epoch 183/200 77/77 - 1s - loss: 0.1550 - accuracy: 0.9511 - val_loss: 0.6271 - val_accuracy: 0.8528 - 1s/epoch - 15ms/step Epoch 184/200 77/77 - 1s - loss: 0.1230 - accuracy: 0.9636 - val_loss: 0.4785 - val_accuracy: 0.8949 - 1s/epoch - 15ms/step Epoch 185/200 77/77 - 1s - loss: 0.1955 - accuracy: 0.9379 - val_loss: 0.8892 - val_accuracy: 0.7967 - 1s/epoch - 15ms/step Epoch 186/200 77/77 - 1s - loss: 0.1287 - accuracy: 0.9581 - val_loss: 0.4366 - val_accuracy: 0.8902 - 1s/epoch - 15ms/step Epoch 187/200 77/77 - 1s - loss: 0.1219 - accuracy: 0.9633 - val_loss: 0.5484 - val_accuracy: 0.8715 - 1s/epoch - 15ms/step Epoch 188/200 77/77 - 1s - loss: 0.1277 - accuracy: 0.9594 - val_loss: 0.5518 - val_accuracy: 0.8621 - 1s/epoch - 15ms/step Epoch 189/200 77/77 - 1s - loss: 0.1971 - accuracy: 0.9472 - val_loss: 0.8884 - val_accuracy: 0.8014 - 1s/epoch - 15ms/step Epoch 190/200 77/77 - 1s - loss: 0.1691 - accuracy: 0.9415 - val_loss: 0.3845 - val_accuracy: 0.9065 - 1s/epoch - 15ms/step Epoch 191/200 77/77 - 1s - loss: 0.1375 - accuracy: 0.9592 - val_loss: 0.4315 - val_accuracy: 0.8738 - 1s/epoch - 15ms/step Epoch 192/200 77/77 - 1s - loss: 0.1400 - accuracy: 0.9553 - val_loss: 0.4341 - val_accuracy: 0.8808 - 1s/epoch - 15ms/step Epoch 193/200 77/77 - 1s - loss: 0.1329 - accuracy: 0.9620 - val_loss: 1.2771 - val_accuracy: 0.7150 - 1s/epoch - 15ms/step Epoch 194/200 77/77 - 1s - loss: 0.1350 - accuracy: 0.9594 - val_loss: 0.9742 - val_accuracy: 0.7664 - 1s/epoch - 15ms/step Epoch 195/200 77/77 - 
1s - loss: 0.0990 - accuracy: 0.9683 - val_loss: 0.4587 - val_accuracy: 0.8902 - 1s/epoch - 15ms/step Epoch 196/200 77/77 - 1s - loss: 0.1321 - accuracy: 0.9623 - val_loss: 1.0273 - val_accuracy: 0.7664 - 1s/epoch - 15ms/step Epoch 197/200 77/77 - 1s - loss: 0.1665 - accuracy: 0.9475 - val_loss: 2.4706 - val_accuracy: 0.5070 - 1s/epoch - 15ms/step Epoch 198/200 77/77 - 1s - loss: 0.1481 - accuracy: 0.9610 - val_loss: 0.7776 - val_accuracy: 0.8107 - 1s/epoch - 15ms/step Epoch 199/200 77/77 - 1s - loss: 0.1552 - accuracy: 0.9540 - val_loss: 0.4528 - val_accuracy: 0.8879 - 1s/epoch - 15ms/step Epoch 200/200 77/77 - 1s - loss: 0.1050 - accuracy: 0.9675 - val_loss: 0.4881 - val_accuracy: 0.8808 - 1s/epoch - 15ms/step
# Visualize how training and validation accuracy evolved over the epochs
accuracy_curves = {
    'Train': history_1.history['accuracy'],
    'Validation': history_1.history['val_accuracy'],
}
for curve in accuracy_curves.values():
    plt.plot(curve)
plt.title('Model Accuracy')
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(list(accuracy_curves), loc='upper left')
plt.show()
Observations:
# Score the fitted model on the training split (prints loss and accuracy),
# then build the sklearn-style metrics table for the same data.
model.evaluate(X_train_normalized, y_train_encoded, verbose=1)
model1_TrainDF = model_performance_classification_sklearn_with_threshold(
    model,
    X_train_normalized,
    y_train_encoded,
)
model1_TrainDF
121/121 [==============================] - 1s 7ms/step - loss: 0.0782 - accuracy: 0.9737 121/121 [==============================] - 1s 5ms/step
| Accuracy | Recall | Precision | F1 | |
|---|---|---|---|---|
| 0 | 0.973746 | 0.973746 | 0.974874 | 0.973941 |
Observations
# Score the fitted model on the validation split (prints loss and accuracy),
# then build the sklearn-style metrics table for the same data.
model.evaluate(X_val_normalized, y_val_encoded, verbose=1)
model1_ValDF = model_performance_classification_sklearn_with_threshold(
    model,
    X_val_normalized,
    y_val_encoded,
)
model1_ValDF
14/14 [==============================] - 0s 15ms/step - loss: 0.4881 - accuracy: 0.8808 14/14 [==============================] - 0s 6ms/step
| Accuracy | Recall | Precision | F1 | |
|---|---|---|---|---|
| 0 | 0.880841 | 0.880841 | 0.887867 | 0.881465 |
Observations
# Score the fitted model on the held-out test split (prints loss and accuracy),
# then build the sklearn-style metrics table for the same data.
model.evaluate(X_test_normalized, y_test_encoded, verbose=2)
model1_TestDF = model_performance_classification_sklearn_with_threshold(
    model,
    X_test_normalized,
    y_test_encoded,
)
model1_TestDF
15/15 - 0s - loss: 0.5750 - accuracy: 0.8863 - 207ms/epoch - 14ms/step 15/15 [==============================] - 0s 5ms/step
| Accuracy | Recall | Precision | F1 | |
|---|---|---|---|---|
| 0 | 0.886316 | 0.886316 | 0.894756 | 0.887596 |
Observations:
# Collect per-class softmax probabilities for every test image
y_pred = model.predict(X_test_normalized)
y_pred
15/15 [==============================] - 0s 5ms/step
array([[2.5825295e-10, 1.8731525e-11, 1.0227022e-10, ..., 7.6328673e-05,
3.5734458e-05, 6.1272149e-06],
[3.6871587e-08, 1.3989663e-03, 1.0832313e-03, ..., 2.1792054e-03,
9.9502015e-01, 3.6656304e-05],
[4.4761779e-07, 6.3672098e-03, 4.6234634e-03, ..., 6.0020573e-03,
9.8069340e-01, 3.5219963e-04],
...,
[9.8092252e-01, 8.8385230e-11, 1.7658030e-08, ..., 2.3761793e-12,
5.3023858e-10, 3.4472514e-09],
[6.8217845e-09, 3.8922789e-12, 3.1978364e-09, ..., 5.4159387e-05,
1.3753737e-11, 1.8412022e-06],
[1.6231765e-03, 2.2416206e-02, 1.8579045e-01, ..., 7.3115951e-03,
1.3829044e-01, 6.7066081e-02]], dtype=float32)
# Visualizing the predicted and correct label of images from test data.
# Fixes: sample over the full test set (the old hard-coded randint(0, 400)
# could miss samples or index out of range if the split size differs), and
# drop the redundant inner loop over rows = 1.
num_samples = 5
for _ in range(num_samples):
    random_index = np.random.randint(0, len(X_test))
    plt.imshow(X_test[random_index])
    plt.show()
    # Reshape to a batch of one 64x64 RGB image before predicting
    probabilities = model.predict(X_test_normalized[random_index].reshape(1, 64, 64, 3))
    print('Predicted Label', enc.inverse_transform(probabilities))
    print('True Label', enc.inverse_transform(y_test_encoded)[random_index])
1/1 [==============================] - 0s 85ms/step Predicted Label ['Sugar beet'] True Label Sugar beet
1/1 [==============================] - 0s 17ms/step Predicted Label ['Black-grass'] True Label Loose Silky-bent
1/1 [==============================] - 0s 17ms/step Predicted Label ['Loose Silky-bent'] True Label Loose Silky-bent
1/1 [==============================] - 0s 18ms/step Predicted Label ['Scentless Mayweed'] True Label Scentless Mayweed
1/1 [==============================] - 0s 20ms/step Predicted Label ['Cleavers'] True Label Small-flowered Cranesbill
Inference:
Creating a CNN model sequentially, using Data Augmentation, to check whether we can further improve the model's performance and metrics
# Reset the Keras/TensorFlow session state before building the next model
backend.clear_session()

# Seed every random number generator involved so runs are reproducible
import random
random.seed(42)
np.random.seed(42)
tf.random.set_seed(42)

# Data-augmentation pipeline: random flips plus small geometric jitter
# (shifts, rotation, shear, zoom); gaps created by transforms are filled
# with the nearest pixel value. No rescaling is applied here.
train_datagen = ImageDataGenerator(
    rotation_range=20,
    width_shift_range=0.1,
    height_shift_range=0.1,
    shear_range=0.1,
    zoom_range=0.1,
    horizontal_flip=True,
    vertical_flip=True,
    fill_mode='nearest',
)
#test_datagen = ImageDataGenerator(rescale = 1.0/255.)
# Intializing a sequential model
# Initializing a sequential CNN for 12-class plant-seedling classification
model = Sequential()
# First conv layer: 64 filters, 3x3 kernel; padding 'same' keeps the output
# spatial size equal to the input. input_shape is the 64x64 RGB image dimension.
model.add(Conv2D(64, (3, 3), activation='relu', padding="same", input_shape=(64, 64, 3)))
# Max pooling halves the spatial size of the first conv layer's output
model.add(MaxPooling2D(2, 2))
model.add(BatchNormalization())
model.add(Conv2D(64, (3, 3), activation='relu', padding="same"))
model.add(BatchNormalization())
model.add(MaxPooling2D(2, 2))
# NOTE(review): a Dense layer applied between conv blocks acts per-pixel
# across channels (64 -> 32) — confirm this interleaving is intentional
model.add(Dense(32, activation='relu'))
model.add(Conv2D(32, (3, 3), activation='relu', padding="same"))
model.add(MaxPooling2D(2, 2))
model.add(Dropout(0.3))
model.add(Conv2D(32, (3, 3), activation='relu', padding="same"))
# NOTE(review): second interleaved per-pixel Dense layer — confirm intentional
model.add(Dense(32, activation='relu'))
model.add(MaxPooling2D(2, 2))
model.add(Dropout(0.3))
model.add(Conv2D(32, (3, 3), activation='relu', padding="same"))
model.add(MaxPooling2D(2, 2))
model.add(Conv2D(32, (3, 3), activation='relu', padding="same"))
model.add(MaxPooling2D(2, 2))
model.add(BatchNormalization())
# Flattening the pooled conv output (1x1x32) to a vector for the dense head
model.add(Flatten())
# Fully connected dense layer with 64 neurons
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.3))
# Fully connected dense layer with 32 neurons
model.add(Dense(32, activation='relu'))
model.add(Dropout(0.3))
model.add(Dense(32, activation='relu'))
# Output layer: 12 neurons with softmax activation, one per plant species
# (multi-class classification)
model.add(Dense(12, activation='softmax'))
# Using Adam Optimizer with its default learning rate
opt=Adam()
# Compile with categorical cross-entropy, matching the one-hot encoded labels
model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])
# Generating the summary of the model
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 64) 1792
max_pooling2d (MaxPooling2D (None, 32, 32, 64) 0
)
batch_normalization (BatchN (None, 32, 32, 64) 256
ormalization)
conv2d_1 (Conv2D) (None, 32, 32, 64) 36928
batch_normalization_1 (Batc (None, 32, 32, 64) 256
hNormalization)
max_pooling2d_1 (MaxPooling (None, 16, 16, 64) 0
2D)
dense (Dense) (None, 16, 16, 32) 2080
conv2d_2 (Conv2D) (None, 16, 16, 32) 9248
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout (Dropout) (None, 8, 8, 32) 0
conv2d_3 (Conv2D) (None, 8, 8, 32) 9248
dense_1 (Dense) (None, 8, 8, 32) 1056
max_pooling2d_3 (MaxPooling (None, 4, 4, 32) 0
2D)
dropout_1 (Dropout) (None, 4, 4, 32) 0
conv2d_4 (Conv2D) (None, 4, 4, 32) 9248
max_pooling2d_4 (MaxPooling (None, 2, 2, 32) 0
2D)
conv2d_5 (Conv2D) (None, 2, 2, 32) 9248
max_pooling2d_5 (MaxPooling (None, 1, 1, 32) 0
2D)
batch_normalization_2 (Batc (None, 1, 1, 32) 128
hNormalization)
flatten (Flatten) (None, 32) 0
dense_2 (Dense) (None, 64) 2112
dropout_2 (Dropout) (None, 64) 0
dense_3 (Dense) (None, 32) 2080
dropout_3 (Dropout) (None, 32) 0
dense_4 (Dense) (None, 32) 1056
dense_5 (Dense) (None, 12) 396
=================================================================
Total params: 85,132
Trainable params: 84,812
Non-trainable params: 320
_________________________________________________________________
Observations:
# Number of full passes over the (augmented) training data
epochs = 200
# Samples drawn from the augmentation generator per gradient step
batch_size = 30
# Train on batches produced by the augmentation generator; the validation
# split is passed directly and therefore is NOT augmented.
# NOTE(review): shuffle=False keeps the generator's sample order fixed
# (with seed=42) — confirm this is intentional for reproducibility rather
# than an oversight, since shuffling usually helps training.
history = model.fit(train_datagen.flow(X_train_normalized,y_train_encoded,
batch_size=batch_size,
seed=42,
shuffle=False),
epochs=epochs,
# Steps per epoch so one epoch roughly covers the training set once
steps_per_epoch=X_train_normalized.shape[0] // batch_size,
validation_data=(X_val_normalized,y_val_encoded),
verbose=1)
Epoch 1/200 128/128 [==============================] - 7s 44ms/step - loss: 2.4939 - accuracy: 0.1124 - val_loss: 2.4278 - val_accuracy: 0.1028 Epoch 2/200 128/128 [==============================] - 5s 41ms/step - loss: 2.3580 - accuracy: 0.1763 - val_loss: 2.4823 - val_accuracy: 0.1285 Epoch 3/200 128/128 [==============================] - 5s 41ms/step - loss: 1.9786 - accuracy: 0.3212 - val_loss: 2.4924 - val_accuracy: 0.1682 Epoch 4/200 128/128 [==============================] - 5s 42ms/step - loss: 1.7392 - accuracy: 0.3752 - val_loss: 4.4700 - val_accuracy: 0.1659 Epoch 5/200 128/128 [==============================] - 5s 40ms/step - loss: 1.6527 - accuracy: 0.4192 - val_loss: 1.9388 - val_accuracy: 0.3318 Epoch 6/200 128/128 [==============================] - 5s 41ms/step - loss: 1.5601 - accuracy: 0.4396 - val_loss: 2.0940 - val_accuracy: 0.2103 Epoch 7/200 128/128 [==============================] - 5s 41ms/step - loss: 1.5004 - accuracy: 0.4600 - val_loss: 1.2419 - val_accuracy: 0.5654 Epoch 8/200 128/128 [==============================] - 5s 41ms/step - loss: 1.4386 - accuracy: 0.4894 - val_loss: 3.1199 - val_accuracy: 0.2570 Epoch 9/200 128/128 [==============================] - 5s 41ms/step - loss: 1.3806 - accuracy: 0.5185 - val_loss: 1.4433 - val_accuracy: 0.5187 Epoch 10/200 128/128 [==============================] - 5s 42ms/step - loss: 1.3531 - accuracy: 0.5342 - val_loss: 1.3506 - val_accuracy: 0.5327 Epoch 11/200 128/128 [==============================] - 5s 41ms/step - loss: 1.3271 - accuracy: 0.5345 - val_loss: 3.3043 - val_accuracy: 0.2921 Epoch 12/200 128/128 [==============================] - 5s 41ms/step - loss: 1.2743 - accuracy: 0.5507 - val_loss: 1.7082 - val_accuracy: 0.4182 Epoch 13/200 128/128 [==============================] - 5s 40ms/step - loss: 1.2477 - accuracy: 0.5638 - val_loss: 3.0424 - val_accuracy: 0.2477 Epoch 14/200 128/128 [==============================] - 5s 41ms/step - loss: 1.2089 - accuracy: 0.5858 - val_loss: 1.4220 - 
val_accuracy: 0.4883 Epoch 15/200 128/128 [==============================] - 5s 40ms/step - loss: 1.1833 - accuracy: 0.5910 - val_loss: 1.2917 - val_accuracy: 0.5514 Epoch 16/200 128/128 [==============================] - 5s 41ms/step - loss: 1.1294 - accuracy: 0.6204 - val_loss: 2.0679 - val_accuracy: 0.3715 Epoch 17/200 128/128 [==============================] - 5s 42ms/step - loss: 1.1005 - accuracy: 0.6246 - val_loss: 1.0766 - val_accuracy: 0.6098 Epoch 18/200 128/128 [==============================] - 5s 41ms/step - loss: 1.0635 - accuracy: 0.6440 - val_loss: 2.3211 - val_accuracy: 0.2991 Epoch 19/200 128/128 [==============================] - 5s 41ms/step - loss: 1.0392 - accuracy: 0.6531 - val_loss: 2.1380 - val_accuracy: 0.3832 Epoch 20/200 128/128 [==============================] - 5s 41ms/step - loss: 1.0123 - accuracy: 0.6474 - val_loss: 1.8464 - val_accuracy: 0.4322 Epoch 21/200 128/128 [==============================] - 5s 41ms/step - loss: 0.9680 - accuracy: 0.6709 - val_loss: 1.2289 - val_accuracy: 0.5724 Epoch 22/200 128/128 [==============================] - 5s 41ms/step - loss: 0.9880 - accuracy: 0.6744 - val_loss: 1.2680 - val_accuracy: 0.5444 Epoch 23/200 128/128 [==============================] - 6s 44ms/step - loss: 0.9614 - accuracy: 0.6796 - val_loss: 0.9850 - val_accuracy: 0.6986 Epoch 24/200 128/128 [==============================] - 5s 41ms/step - loss: 0.9182 - accuracy: 0.6838 - val_loss: 1.0132 - val_accuracy: 0.6589 Epoch 25/200 128/128 [==============================] - 6s 44ms/step - loss: 0.8811 - accuracy: 0.7089 - val_loss: 2.7736 - val_accuracy: 0.3762 Epoch 26/200 128/128 [==============================] - 5s 41ms/step - loss: 0.8731 - accuracy: 0.7092 - val_loss: 0.9394 - val_accuracy: 0.6939 Epoch 27/200 128/128 [==============================] - 5s 41ms/step - loss: 0.8318 - accuracy: 0.7247 - val_loss: 1.6999 - val_accuracy: 0.5140 Epoch 28/200 128/128 [==============================] - 5s 41ms/step - loss: 0.8553 - 
accuracy: 0.7226 - val_loss: 0.7422 - val_accuracy: 0.7500 Epoch 29/200 128/128 [==============================] - 5s 41ms/step - loss: 0.8079 - accuracy: 0.7357 - val_loss: 1.0259 - val_accuracy: 0.6355 Epoch 30/200 128/128 [==============================] - 5s 41ms/step - loss: 0.8222 - accuracy: 0.7307 - val_loss: 2.4390 - val_accuracy: 0.2290 Epoch 31/200 128/128 [==============================] - 5s 41ms/step - loss: 0.7719 - accuracy: 0.7364 - val_loss: 0.9686 - val_accuracy: 0.6939 Epoch 32/200 128/128 [==============================] - 5s 42ms/step - loss: 0.7861 - accuracy: 0.7404 - val_loss: 0.8620 - val_accuracy: 0.7056 Epoch 33/200 128/128 [==============================] - 5s 41ms/step - loss: 0.7826 - accuracy: 0.7383 - val_loss: 1.0813 - val_accuracy: 0.6449 Epoch 34/200 128/128 [==============================] - 5s 41ms/step - loss: 0.7701 - accuracy: 0.7480 - val_loss: 0.9110 - val_accuracy: 0.6822 Epoch 35/200 128/128 [==============================] - 5s 40ms/step - loss: 0.7456 - accuracy: 0.7522 - val_loss: 1.5704 - val_accuracy: 0.6005 Epoch 36/200 128/128 [==============================] - 5s 42ms/step - loss: 0.7342 - accuracy: 0.7561 - val_loss: 4.0755 - val_accuracy: 0.2921 Epoch 37/200 128/128 [==============================] - 5s 41ms/step - loss: 0.7279 - accuracy: 0.7642 - val_loss: 0.9836 - val_accuracy: 0.6752 Epoch 38/200 128/128 [==============================] - 5s 41ms/step - loss: 0.7088 - accuracy: 0.7616 - val_loss: 1.7569 - val_accuracy: 0.4953 Epoch 39/200 128/128 [==============================] - 5s 41ms/step - loss: 0.7184 - accuracy: 0.7582 - val_loss: 1.4326 - val_accuracy: 0.5397 Epoch 40/200 128/128 [==============================] - 8s 63ms/step - loss: 0.6835 - accuracy: 0.7739 - val_loss: 0.9249 - val_accuracy: 0.6752 Epoch 41/200 128/128 [==============================] - 5s 42ms/step - loss: 0.6607 - accuracy: 0.7794 - val_loss: 2.1595 - val_accuracy: 0.4743 Epoch 42/200 128/128 [==============================] - 
6s 43ms/step - loss: 0.6860 - accuracy: 0.7681 - val_loss: 1.0651 - val_accuracy: 0.6565 Epoch 43/200 128/128 [==============================] - 5s 41ms/step - loss: 0.6546 - accuracy: 0.7818 - val_loss: 0.7759 - val_accuracy: 0.7313 Epoch 44/200 128/128 [==============================] - 5s 41ms/step - loss: 0.6592 - accuracy: 0.7755 - val_loss: 1.1068 - val_accuracy: 0.6379 Epoch 45/200 128/128 [==============================] - 6s 44ms/step - loss: 0.6410 - accuracy: 0.7799 - val_loss: 1.1661 - val_accuracy: 0.6752 Epoch 46/200 128/128 [==============================] - 5s 41ms/step - loss: 0.6189 - accuracy: 0.7891 - val_loss: 0.9643 - val_accuracy: 0.7033 Epoch 47/200 128/128 [==============================] - 5s 41ms/step - loss: 0.6407 - accuracy: 0.7750 - val_loss: 0.5878 - val_accuracy: 0.8084 Epoch 48/200 128/128 [==============================] - 5s 40ms/step - loss: 0.6360 - accuracy: 0.7883 - val_loss: 0.7406 - val_accuracy: 0.7430 Epoch 49/200 128/128 [==============================] - 5s 42ms/step - loss: 0.5796 - accuracy: 0.7951 - val_loss: 0.7106 - val_accuracy: 0.7710 Epoch 50/200 128/128 [==============================] - 5s 41ms/step - loss: 0.6240 - accuracy: 0.7915 - val_loss: 0.7714 - val_accuracy: 0.7640 Epoch 51/200 128/128 [==============================] - 5s 41ms/step - loss: 0.6180 - accuracy: 0.7901 - val_loss: 0.6777 - val_accuracy: 0.7710 Epoch 52/200 128/128 [==============================] - 5s 41ms/step - loss: 0.6130 - accuracy: 0.7975 - val_loss: 0.5645 - val_accuracy: 0.8131 Epoch 53/200 128/128 [==============================] - 5s 41ms/step - loss: 0.6128 - accuracy: 0.7899 - val_loss: 0.4666 - val_accuracy: 0.8458 Epoch 54/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5919 - accuracy: 0.7957 - val_loss: 0.9996 - val_accuracy: 0.7173 Epoch 55/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5692 - accuracy: 0.8046 - val_loss: 0.6480 - val_accuracy: 0.7804 Epoch 56/200 128/128 
[==============================] - 7s 52ms/step - loss: 0.5662 - accuracy: 0.8080 - val_loss: 1.4497 - val_accuracy: 0.6308 Epoch 57/200 128/128 [==============================] - 6s 43ms/step - loss: 0.5657 - accuracy: 0.8108 - val_loss: 0.5125 - val_accuracy: 0.8154 Epoch 58/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5619 - accuracy: 0.8067 - val_loss: 0.6239 - val_accuracy: 0.8061 Epoch 59/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5319 - accuracy: 0.8184 - val_loss: 0.4419 - val_accuracy: 0.8505 Epoch 60/200 128/128 [==============================] - 5s 42ms/step - loss: 0.5332 - accuracy: 0.8221 - val_loss: 0.5705 - val_accuracy: 0.8178 Epoch 61/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5996 - accuracy: 0.7943 - val_loss: 0.9098 - val_accuracy: 0.7523 Epoch 62/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5390 - accuracy: 0.8190 - val_loss: 0.7491 - val_accuracy: 0.7477 Epoch 63/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5348 - accuracy: 0.8213 - val_loss: 1.4596 - val_accuracy: 0.6379 Epoch 64/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5295 - accuracy: 0.8213 - val_loss: 0.8024 - val_accuracy: 0.7640 Epoch 65/200 128/128 [==============================] - 6s 43ms/step - loss: 0.5021 - accuracy: 0.8313 - val_loss: 0.5257 - val_accuracy: 0.8294 Epoch 66/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5327 - accuracy: 0.8258 - val_loss: 0.3970 - val_accuracy: 0.8692 Epoch 67/200 128/128 [==============================] - 5s 42ms/step - loss: 0.5031 - accuracy: 0.8229 - val_loss: 0.6878 - val_accuracy: 0.7617 Epoch 68/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5057 - accuracy: 0.8276 - val_loss: 1.3814 - val_accuracy: 0.6192 Epoch 69/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5169 - accuracy: 0.8266 - val_loss: 0.5553 - 
val_accuracy: 0.8084 Epoch 70/200 128/128 [==============================] - 5s 41ms/step - loss: 0.5019 - accuracy: 0.8305 - val_loss: 0.5101 - val_accuracy: 0.8224 Epoch 71/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4820 - accuracy: 0.8407 - val_loss: 1.7412 - val_accuracy: 0.6285 Epoch 72/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4772 - accuracy: 0.8329 - val_loss: 0.4152 - val_accuracy: 0.8598 Epoch 73/200 128/128 [==============================] - 6s 44ms/step - loss: 0.5052 - accuracy: 0.8279 - val_loss: 1.2373 - val_accuracy: 0.6168 Epoch 74/200 128/128 [==============================] - 5s 42ms/step - loss: 0.4703 - accuracy: 0.8480 - val_loss: 0.4226 - val_accuracy: 0.8645 Epoch 75/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4565 - accuracy: 0.8402 - val_loss: 0.6687 - val_accuracy: 0.7897 Epoch 76/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4403 - accuracy: 0.8480 - val_loss: 0.5239 - val_accuracy: 0.8271 Epoch 77/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4899 - accuracy: 0.8357 - val_loss: 1.4715 - val_accuracy: 0.6192 Epoch 78/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4830 - accuracy: 0.8394 - val_loss: 0.4089 - val_accuracy: 0.8505 Epoch 79/200 128/128 [==============================] - 5s 42ms/step - loss: 0.4525 - accuracy: 0.8475 - val_loss: 0.6508 - val_accuracy: 0.7944 Epoch 80/200 128/128 [==============================] - 6s 43ms/step - loss: 0.4094 - accuracy: 0.8619 - val_loss: 0.6761 - val_accuracy: 0.7991 Epoch 81/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4596 - accuracy: 0.8402 - val_loss: 1.0543 - val_accuracy: 0.6145 Epoch 82/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4643 - accuracy: 0.8334 - val_loss: 1.0630 - val_accuracy: 0.7243 Epoch 83/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4530 - 
accuracy: 0.8436 - val_loss: 0.4625 - val_accuracy: 0.8364 Epoch 84/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4458 - accuracy: 0.8541 - val_loss: 1.2329 - val_accuracy: 0.6542 Epoch 85/200 128/128 [==============================] - 6s 43ms/step - loss: 0.4176 - accuracy: 0.8564 - val_loss: 0.6211 - val_accuracy: 0.7874 Epoch 86/200 128/128 [==============================] - 6s 48ms/step - loss: 0.4693 - accuracy: 0.8444 - val_loss: 0.6672 - val_accuracy: 0.7967 Epoch 87/200 128/128 [==============================] - 6s 43ms/step - loss: 0.4379 - accuracy: 0.8549 - val_loss: 0.9556 - val_accuracy: 0.6916 Epoch 88/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4032 - accuracy: 0.8604 - val_loss: 0.4186 - val_accuracy: 0.8832 Epoch 89/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4087 - accuracy: 0.8677 - val_loss: 0.5084 - val_accuracy: 0.8341 Epoch 90/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4115 - accuracy: 0.8601 - val_loss: 0.4140 - val_accuracy: 0.8575 Epoch 91/200 128/128 [==============================] - 5s 43ms/step - loss: 0.4168 - accuracy: 0.8609 - val_loss: 0.9081 - val_accuracy: 0.7453 Epoch 92/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4270 - accuracy: 0.8562 - val_loss: 0.4540 - val_accuracy: 0.8621 Epoch 93/200 128/128 [==============================] - 5s 42ms/step - loss: 0.4080 - accuracy: 0.8567 - val_loss: 1.3221 - val_accuracy: 0.6589 Epoch 94/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4159 - accuracy: 0.8556 - val_loss: 0.4579 - val_accuracy: 0.8341 Epoch 95/200 128/128 [==============================] - 5s 43ms/step - loss: 0.3769 - accuracy: 0.8750 - val_loss: 0.4313 - val_accuracy: 0.8692 Epoch 96/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4278 - accuracy: 0.8538 - val_loss: 1.0849 - val_accuracy: 0.7196 Epoch 97/200 128/128 [==============================] - 
5s 41ms/step - loss: 0.4084 - accuracy: 0.8522 - val_loss: 0.6176 - val_accuracy: 0.8131 Epoch 98/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3882 - accuracy: 0.8708 - val_loss: 0.4960 - val_accuracy: 0.8364 Epoch 99/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4020 - accuracy: 0.8638 - val_loss: 0.7156 - val_accuracy: 0.8061 Epoch 100/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4031 - accuracy: 0.8674 - val_loss: 1.2692 - val_accuracy: 0.6308 Epoch 101/200 128/128 [==============================] - 5s 41ms/step - loss: 0.4076 - accuracy: 0.8617 - val_loss: 0.6873 - val_accuracy: 0.8061 Epoch 102/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3887 - accuracy: 0.8577 - val_loss: 1.3242 - val_accuracy: 0.7103 Epoch 103/200 128/128 [==============================] - 6s 43ms/step - loss: 0.4041 - accuracy: 0.8583 - val_loss: 0.3587 - val_accuracy: 0.8715 Epoch 104/200 128/128 [==============================] - 5s 42ms/step - loss: 0.3864 - accuracy: 0.8666 - val_loss: 4.7652 - val_accuracy: 0.3692 Epoch 105/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3895 - accuracy: 0.8680 - val_loss: 0.4881 - val_accuracy: 0.8528 Epoch 106/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3559 - accuracy: 0.8805 - val_loss: 0.4824 - val_accuracy: 0.8435 Epoch 107/200 128/128 [==============================] - 6s 43ms/step - loss: 0.3943 - accuracy: 0.8648 - val_loss: 1.5649 - val_accuracy: 0.5958 Epoch 108/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3966 - accuracy: 0.8617 - val_loss: 1.5344 - val_accuracy: 0.6939 Epoch 109/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3637 - accuracy: 0.8742 - val_loss: 1.0776 - val_accuracy: 0.7547 Epoch 110/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3479 - accuracy: 0.8792 - val_loss: 0.6281 - val_accuracy: 0.8037 Epoch 111/200 
128/128 [==============================] - 6s 44ms/step - loss: 0.3498 - accuracy: 0.8863 - val_loss: 1.5955 - val_accuracy: 0.6659 Epoch 112/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3706 - accuracy: 0.8719 - val_loss: 0.3286 - val_accuracy: 0.9206 Epoch 113/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3658 - accuracy: 0.8750 - val_loss: 0.4361 - val_accuracy: 0.8692 Epoch 114/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3485 - accuracy: 0.8818 - val_loss: 0.6118 - val_accuracy: 0.8505 Epoch 115/200 128/128 [==============================] - 5s 42ms/step - loss: 0.3708 - accuracy: 0.8842 - val_loss: 0.3111 - val_accuracy: 0.8879 Epoch 116/200 128/128 [==============================] - 5s 43ms/step - loss: 0.3295 - accuracy: 0.8910 - val_loss: 1.2392 - val_accuracy: 0.6285 Epoch 117/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3717 - accuracy: 0.8750 - val_loss: 0.5155 - val_accuracy: 0.8621 Epoch 118/200 128/128 [==============================] - 6s 43ms/step - loss: 0.3630 - accuracy: 0.8805 - val_loss: 2.9767 - val_accuracy: 0.3715 Epoch 119/200 128/128 [==============================] - 7s 51ms/step - loss: 0.3402 - accuracy: 0.8784 - val_loss: 2.2329 - val_accuracy: 0.4836 Epoch 120/200 128/128 [==============================] - 5s 42ms/step - loss: 0.3803 - accuracy: 0.8763 - val_loss: 1.1495 - val_accuracy: 0.7220 Epoch 121/200 128/128 [==============================] - 5s 42ms/step - loss: 0.3283 - accuracy: 0.8908 - val_loss: 0.2934 - val_accuracy: 0.9065 Epoch 122/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3330 - accuracy: 0.8887 - val_loss: 2.3880 - val_accuracy: 0.5234 Epoch 123/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3398 - accuracy: 0.8818 - val_loss: 1.3506 - val_accuracy: 0.7196 Epoch 124/200 128/128 [==============================] - 5s 42ms/step - loss: 0.3437 - accuracy: 0.8839 - val_loss: 
0.5650 - val_accuracy: 0.8411 Epoch 125/200 128/128 [==============================] - 5s 42ms/step - loss: 0.3481 - accuracy: 0.8795 - val_loss: 0.8538 - val_accuracy: 0.7570 Epoch 126/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3364 - accuracy: 0.8884 - val_loss: 1.0054 - val_accuracy: 0.7547 Epoch 127/200 128/128 [==============================] - 6s 43ms/step - loss: 0.3277 - accuracy: 0.8863 - val_loss: 0.8878 - val_accuracy: 0.7757 Epoch 128/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3419 - accuracy: 0.8871 - val_loss: 0.8952 - val_accuracy: 0.7850 Epoch 129/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3381 - accuracy: 0.8879 - val_loss: 1.2390 - val_accuracy: 0.7243 Epoch 130/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3284 - accuracy: 0.8923 - val_loss: 0.4740 - val_accuracy: 0.8692 Epoch 131/200 128/128 [==============================] - 6s 44ms/step - loss: 0.3294 - accuracy: 0.8908 - val_loss: 0.4837 - val_accuracy: 0.8832 Epoch 132/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3214 - accuracy: 0.8957 - val_loss: 0.4135 - val_accuracy: 0.8785 Epoch 133/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3125 - accuracy: 0.8931 - val_loss: 0.3945 - val_accuracy: 0.8762 Epoch 134/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3208 - accuracy: 0.8915 - val_loss: 2.0014 - val_accuracy: 0.4579 Epoch 135/200 128/128 [==============================] - 5s 42ms/step - loss: 0.3952 - accuracy: 0.8687 - val_loss: 0.4472 - val_accuracy: 0.8458 Epoch 136/200 128/128 [==============================] - 5s 42ms/step - loss: 0.3160 - accuracy: 0.8902 - val_loss: 0.3148 - val_accuracy: 0.9136 Epoch 137/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3193 - accuracy: 0.8936 - val_loss: 0.4318 - val_accuracy: 0.8855 Epoch 138/200 128/128 [==============================] - 5s 41ms/step - 
loss: 0.3251 - accuracy: 0.8894 - val_loss: 0.3365 - val_accuracy: 0.8949 Epoch 139/200 128/128 [==============================] - 6s 43ms/step - loss: 0.3138 - accuracy: 0.8970 - val_loss: 0.8588 - val_accuracy: 0.7570 Epoch 140/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2957 - accuracy: 0.9002 - val_loss: 0.7345 - val_accuracy: 0.8061 Epoch 141/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2802 - accuracy: 0.9062 - val_loss: 0.8527 - val_accuracy: 0.7266 Epoch 142/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3011 - accuracy: 0.8965 - val_loss: 0.9023 - val_accuracy: 0.7173 Epoch 143/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2907 - accuracy: 0.9046 - val_loss: 0.6603 - val_accuracy: 0.8388 Epoch 144/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2945 - accuracy: 0.8947 - val_loss: 0.3574 - val_accuracy: 0.8972 Epoch 145/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3221 - accuracy: 0.8887 - val_loss: 0.3667 - val_accuracy: 0.8785 Epoch 146/200 128/128 [==============================] - 5s 43ms/step - loss: 0.3186 - accuracy: 0.8926 - val_loss: 0.4729 - val_accuracy: 0.8458 Epoch 147/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3063 - accuracy: 0.8973 - val_loss: 0.8123 - val_accuracy: 0.8318 Epoch 148/200 128/128 [==============================] - 6s 47ms/step - loss: 0.2944 - accuracy: 0.9004 - val_loss: 0.7429 - val_accuracy: 0.7897 Epoch 149/200 128/128 [==============================] - 6s 46ms/step - loss: 0.3097 - accuracy: 0.8913 - val_loss: 0.3475 - val_accuracy: 0.8879 Epoch 150/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2879 - accuracy: 0.8968 - val_loss: 0.3697 - val_accuracy: 0.8762 Epoch 151/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2880 - accuracy: 0.8986 - val_loss: 1.0873 - val_accuracy: 0.7126 Epoch 152/200 128/128 
[==============================] - 5s 41ms/step - loss: 0.2981 - accuracy: 0.8999 - val_loss: 0.4143 - val_accuracy: 0.8738 Epoch 153/200 128/128 [==============================] - 6s 43ms/step - loss: 0.2985 - accuracy: 0.8986 - val_loss: 0.5909 - val_accuracy: 0.8435 Epoch 154/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2989 - accuracy: 0.9023 - val_loss: 0.6861 - val_accuracy: 0.8598 Epoch 155/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2899 - accuracy: 0.8978 - val_loss: 0.4657 - val_accuracy: 0.8832 Epoch 156/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2952 - accuracy: 0.9041 - val_loss: 0.6364 - val_accuracy: 0.7991 Epoch 157/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2725 - accuracy: 0.9041 - val_loss: 0.6693 - val_accuracy: 0.8318 Epoch 158/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3070 - accuracy: 0.9044 - val_loss: 0.5411 - val_accuracy: 0.8341 Epoch 159/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2750 - accuracy: 0.9088 - val_loss: 0.4746 - val_accuracy: 0.8645 Epoch 160/200 128/128 [==============================] - 6s 43ms/step - loss: 0.3074 - accuracy: 0.8965 - val_loss: 0.5492 - val_accuracy: 0.8364 Epoch 161/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2669 - accuracy: 0.9141 - val_loss: 1.0586 - val_accuracy: 0.7290 Epoch 162/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3084 - accuracy: 0.8955 - val_loss: 0.4879 - val_accuracy: 0.8551 Epoch 163/200 128/128 [==============================] - 5s 41ms/step - loss: 0.3011 - accuracy: 0.8970 - val_loss: 0.7613 - val_accuracy: 0.7921 Epoch 164/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2828 - accuracy: 0.9088 - val_loss: 2.5852 - val_accuracy: 0.5304 Epoch 165/200 128/128 [==============================] - 5s 43ms/step - loss: 0.2863 - accuracy: 0.9088 - val_loss: 0.4113 
- val_accuracy: 0.8785 Epoch 166/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2936 - accuracy: 0.9075 - val_loss: 0.5468 - val_accuracy: 0.8318 Epoch 167/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2875 - accuracy: 0.9054 - val_loss: 0.4999 - val_accuracy: 0.8575 Epoch 168/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2408 - accuracy: 0.9180 - val_loss: 0.3272 - val_accuracy: 0.9019 Epoch 169/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2539 - accuracy: 0.9198 - val_loss: 0.4010 - val_accuracy: 0.8738 Epoch 170/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2727 - accuracy: 0.9101 - val_loss: 0.7971 - val_accuracy: 0.8224 Epoch 171/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2811 - accuracy: 0.9012 - val_loss: 3.8131 - val_accuracy: 0.4743 Epoch 172/200 128/128 [==============================] - 6s 44ms/step - loss: 0.2796 - accuracy: 0.9054 - val_loss: 0.7114 - val_accuracy: 0.8411 Epoch 173/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2725 - accuracy: 0.9088 - val_loss: 0.5974 - val_accuracy: 0.8598 Epoch 174/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2513 - accuracy: 0.9167 - val_loss: 0.3013 - val_accuracy: 0.9019 Epoch 175/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2717 - accuracy: 0.9073 - val_loss: 0.6183 - val_accuracy: 0.8154 Epoch 176/200 128/128 [==============================] - 6s 43ms/step - loss: 0.2841 - accuracy: 0.9073 - val_loss: 0.5470 - val_accuracy: 0.8621 Epoch 177/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2545 - accuracy: 0.9149 - val_loss: 0.3834 - val_accuracy: 0.8832 Epoch 178/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2860 - accuracy: 0.9010 - val_loss: 1.3895 - val_accuracy: 0.7383 Epoch 179/200 128/128 [==============================] - 7s 53ms/step - loss: 
0.2698 - accuracy: 0.9101 - val_loss: 0.4915 - val_accuracy: 0.8692 Epoch 180/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2815 - accuracy: 0.9036 - val_loss: 0.6526 - val_accuracy: 0.8294 Epoch 181/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2644 - accuracy: 0.9149 - val_loss: 0.4148 - val_accuracy: 0.8832 Epoch 182/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2735 - accuracy: 0.9086 - val_loss: 0.3858 - val_accuracy: 0.8925 Epoch 183/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2686 - accuracy: 0.9112 - val_loss: 0.8041 - val_accuracy: 0.7780 Epoch 184/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2589 - accuracy: 0.9141 - val_loss: 0.4944 - val_accuracy: 0.8715 Epoch 185/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2599 - accuracy: 0.9172 - val_loss: 0.4712 - val_accuracy: 0.8715 Epoch 186/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2653 - accuracy: 0.9094 - val_loss: 0.3401 - val_accuracy: 0.8995 Epoch 187/200 128/128 [==============================] - 6s 44ms/step - loss: 0.2734 - accuracy: 0.9099 - val_loss: 0.3458 - val_accuracy: 0.9112 Epoch 188/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2477 - accuracy: 0.9206 - val_loss: 0.3527 - val_accuracy: 0.9136 Epoch 189/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2655 - accuracy: 0.9133 - val_loss: 0.4563 - val_accuracy: 0.8855 Epoch 190/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2699 - accuracy: 0.9130 - val_loss: 0.4260 - val_accuracy: 0.8832 Epoch 191/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2661 - accuracy: 0.9109 - val_loss: 0.5272 - val_accuracy: 0.8762 Epoch 192/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2636 - accuracy: 0.9080 - val_loss: 0.7638 - val_accuracy: 0.7804 Epoch 193/200 128/128 
[==============================] - 5s 42ms/step - loss: 0.2402 - accuracy: 0.9206 - val_loss: 0.2480 - val_accuracy: 0.9299 Epoch 194/200 128/128 [==============================] - 6s 43ms/step - loss: 0.2631 - accuracy: 0.9112 - val_loss: 1.0812 - val_accuracy: 0.7079 Epoch 195/200 128/128 [==============================] - 5s 43ms/step - loss: 0.2570 - accuracy: 0.9143 - val_loss: 0.4845 - val_accuracy: 0.8481 Epoch 196/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2503 - accuracy: 0.9217 - val_loss: 0.2763 - val_accuracy: 0.9182 Epoch 197/200 128/128 [==============================] - 5s 42ms/step - loss: 0.2310 - accuracy: 0.9209 - val_loss: 0.4874 - val_accuracy: 0.8808 Epoch 198/200 128/128 [==============================] - 6s 43ms/step - loss: 0.2703 - accuracy: 0.9167 - val_loss: 0.4553 - val_accuracy: 0.8902 Epoch 199/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2219 - accuracy: 0.9274 - val_loss: 0.4491 - val_accuracy: 0.8668 Epoch 200/200 128/128 [==============================] - 5s 41ms/step - loss: 0.2278 - accuracy: 0.9230 - val_loss: 0.3396 - val_accuracy: 0.9229
# Plot training vs. validation accuracy per epoch from the recorded fit history.
train_acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
plt.plot(train_acc)
plt.plot(val_acc)
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.title('Model Accuracy')
plt.legend(['Train', 'Validation'], loc='upper left')
plt.show()
Observations:
# Evaluate the trained CNN on the normalized training split (loss + accuracy).
model.evaluate(X_train_normalized, y_train_encoded, verbose=1)

# Summarize accuracy / recall / precision / F1 on the training split.
model2_TrainDF = model_performance_classification_sklearn_with_threshold(model, X_train_normalized, y_train_encoded)
model2_TrainDF
121/121 [==============================] - 1s 7ms/step - loss: 0.1691 - accuracy: 0.9465 121/121 [==============================] - 1s 6ms/step
| | Accuracy | Recall | Precision | F1 |
|---|---|---|---|---|
| 0 | 0.946452 | 0.946452 | 0.947343 | 0.946507 |
Observations
# Evaluate the trained CNN on the normalized validation split (loss + accuracy).
model.evaluate(X_val_normalized, y_val_encoded, verbose=1)

# Summarize accuracy / recall / precision / F1 on the validation split.
model2_ValDF = model_performance_classification_sklearn_with_threshold(model, X_val_normalized, y_val_encoded)
model2_ValDF
14/14 [==============================] - 0s 7ms/step - loss: 0.3396 - accuracy: 0.9229 14/14 [==============================] - 0s 6ms/step
| | Accuracy | Recall | Precision | F1 |
|---|---|---|---|---|
| 0 | 0.922897 | 0.922897 | 0.927033 | 0.923229 |
Observations
# Evaluate the trained CNN on the held-out normalized test split (loss + accuracy).
model.evaluate(X_test_normalized, y_test_encoded, verbose=2)

# Summarize accuracy / recall / precision / F1 on the test split.
model2_TestDF = model_performance_classification_sklearn_with_threshold(
    model, X_test_normalized, y_test_encoded,
)
model2_TestDF
15/15 - 0s - loss: 0.3019 - accuracy: 0.9116 - 97ms/epoch - 6ms/step 15/15 [==============================] - 0s 6ms/step
| | Accuracy | Recall | Precision | F1 |
|---|---|---|---|---|
| 0 | 0.911579 | 0.911579 | 0.914286 | 0.91229 |
Observations
# Visualizing the predicted and correct label of images from test data
rows = 1
cols = 5
# Derive the valid index range and image shape from the data itself instead of
# hard-coding 400 samples and a 64x64x3 shape (was np.random.randint(0, 400)
# and reshape(1, 64, 64, 3)), so the cell works for any test-set size/shape.
num_test = X_test.shape[0]
for _ in range(rows * cols):
    # Pick a random test image to display.
    random_index = np.random.randint(0, num_test)
    plt.imshow(X_test[random_index])
    plt.show()
    # The model expects a leading batch dimension; predict on a single normalized image.
    sample = X_test_normalized[random_index].reshape(1, *X_test_normalized.shape[1:])
    print('Predicted Label', enc.inverse_transform(model.predict(sample)))
    print('True Label', enc.inverse_transform(y_test_encoded)[random_index])
1/1 [==============================] - 0s 20ms/step Predicted Label ['Common wheat'] True Label Common wheat
1/1 [==============================] - 0s 18ms/step Predicted Label ['Maize'] True Label Maize
1/1 [==============================] - 0s 17ms/step Predicted Label ['Scentless Mayweed'] True Label Scentless Mayweed
1/1 [==============================] - 0s 18ms/step Predicted Label ['Small-flowered Cranesbill'] True Label Small-flowered Cranesbill
1/1 [==============================] - 0s 18ms/step Predicted Label ['Shepherds Purse'] True Label Shepherds Purse
Inference:
# Reset the Keras session so state from earlier models does not carry over.
backend.clear_session()

# Seed every random-number generator in play for reproducible runs.
import random

SEED = 42
np.random.seed(SEED)
random.seed(SEED)
tf.random.set_seed(SEED)

# Load the VGG16 convolutional base pretrained on ImageNet, without its
# classification head, sized for our 64x64 RGB inputs.
from tensorflow.keras.models import Model
from keras.applications.vgg16 import VGG16

vgg_model = VGG16(weights='imagenet', include_top=False, input_shape=(64, 64, 3))
vgg_model.summary()
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/vgg16/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5
58889256/58889256 [==============================] - 2s 0us/step
Model: "vgg16"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 64, 64, 3)] 0
block1_conv1 (Conv2D) (None, 64, 64, 64) 1792
block1_conv2 (Conv2D) (None, 64, 64, 64) 36928
block1_pool (MaxPooling2D) (None, 32, 32, 64) 0
block2_conv1 (Conv2D) (None, 32, 32, 128) 73856
block2_conv2 (Conv2D) (None, 32, 32, 128) 147584
block2_pool (MaxPooling2D) (None, 16, 16, 128) 0
block3_conv1 (Conv2D) (None, 16, 16, 256) 295168
block3_conv2 (Conv2D) (None, 16, 16, 256) 590080
block3_conv3 (Conv2D) (None, 16, 16, 256) 590080
block3_pool (MaxPooling2D) (None, 8, 8, 256) 0
block4_conv1 (Conv2D) (None, 8, 8, 512) 1180160
block4_conv2 (Conv2D) (None, 8, 8, 512) 2359808
block4_conv3 (Conv2D) (None, 8, 8, 512) 2359808
block4_pool (MaxPooling2D) (None, 4, 4, 512) 0
block5_conv1 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv2 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv3 (Conv2D) (None, 4, 4, 512) 2359808
block5_pool (MaxPooling2D) (None, 2, 2, 512) 0
=================================================================
Total params: 14,714,688
Trainable params: 14,714,688
Non-trainable params: 0
_________________________________________________________________
Observations:
# Transfer-learning classifier: a frozen VGG16 feature extractor followed by a
# small fully-connected head ending in a 12-way softmax (one unit per species).

# Freeze every layer of the pretrained base so only the new head is trained.
for vgg_layer in vgg_model.layers:
    vgg_layer.trainable = False

new_model = Sequential()

# Frozen convolutional feature extractor.
new_model.add(vgg_model)
# Flatten the convolutional feature maps into a vector for the dense head.
new_model.add(Flatten())

# Dense head. ReLU activations with He-normal weight initialization throughout;
# BatchNormalization and 20% Dropout interleaved for regularization.
new_model.add(Dense(64, kernel_initializer='he_normal', activation='relu'))
new_model.add(Dense(64, kernel_initializer='he_normal', activation='relu'))
new_model.add(BatchNormalization())
new_model.add(Dense(32, kernel_initializer='he_normal', activation='relu'))
new_model.add(Dropout(0.2))  # drop 20% of activations
new_model.add(Dense(32, kernel_initializer='he_normal', activation='relu'))
new_model.add(Dense(16, kernel_initializer='he_normal', activation='relu'))
new_model.add(BatchNormalization())
new_model.add(Dropout(0.2))  # drop 20% of activations
new_model.add(Dense(8, kernel_initializer='he_normal', activation='relu'))
new_model.add(BatchNormalization())
new_model.add(Dense(16, activation='relu'))
# Output layer: one probability per plant species.
new_model.add(Dense(12, activation='softmax'))

# Adam with default hyperparameters; categorical cross-entropy suits the
# one-hot-encoded labels.
opt = Adam()
new_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])

# Print the layer-by-layer summary of the assembled model.
new_model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
vgg16 (Functional) (None, 2, 2, 512) 14714688
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 64) 131136
dense_1 (Dense) (None, 64) 4160
batch_normalization (BatchN (None, 64) 256
ormalization)
dense_2 (Dense) (None, 32) 2080
dropout (Dropout) (None, 32) 0
dense_3 (Dense) (None, 32) 1056
dense_4 (Dense) (None, 16) 528
batch_normalization_1 (Batc (None, 16) 64
hNormalization)
dropout_1 (Dropout) (None, 16) 0
dense_5 (Dense) (None, 8) 136
batch_normalization_2 (Batc (None, 8) 32
hNormalization)
dense_6 (Dense) (None, 16) 144
dense_7 (Dense) (None, 12) 204
=================================================================
Total params: 14,854,484
Trainable params: 139,620
Non-trainable params: 14,714,864
_________________________________________________________________
Observations:
# Training configuration.
epochs = 200
batch_size = 30

# Augmented training batches from the image generator; validation uses the
# untouched (non-augmented) validation split.
train_flow = train_datagen.flow(
    X_train_normalized,
    y_train_encoded,
    batch_size=batch_size,
    seed=42,
    shuffle=False,
)
steps = X_train_normalized.shape[0] // batch_size

history_vgg16 = new_model.fit(
    train_flow,
    epochs=epochs,
    steps_per_epoch=steps,
    validation_data=(X_val_normalized, y_val_encoded),
    verbose=1,
)
Epoch 1/200 128/128 [==============================] - 9s 54ms/step - loss: 2.4564 - accuracy: 0.1462 - val_loss: 2.4526 - val_accuracy: 0.1472 Epoch 2/200 128/128 [==============================] - 6s 45ms/step - loss: 2.2239 - accuracy: 0.2478 - val_loss: 2.1505 - val_accuracy: 0.2477 Epoch 3/200 128/128 [==============================] - 6s 43ms/step - loss: 2.0379 - accuracy: 0.2981 - val_loss: 2.0983 - val_accuracy: 0.2874 Epoch 4/200 128/128 [==============================] - 6s 44ms/step - loss: 1.8941 - accuracy: 0.3128 - val_loss: 1.8832 - val_accuracy: 0.3224 Epoch 5/200 128/128 [==============================] - 6s 45ms/step - loss: 1.8254 - accuracy: 0.3353 - val_loss: 1.7882 - val_accuracy: 0.3458 Epoch 6/200 128/128 [==============================] - 6s 43ms/step - loss: 1.7612 - accuracy: 0.3642 - val_loss: 1.8407 - val_accuracy: 0.3738 Epoch 7/200 128/128 [==============================] - 6s 48ms/step - loss: 1.7381 - accuracy: 0.3592 - val_loss: 1.6876 - val_accuracy: 0.4112 Epoch 8/200 128/128 [==============================] - 6s 43ms/step - loss: 1.7040 - accuracy: 0.3815 - val_loss: 1.7948 - val_accuracy: 0.3762 Epoch 9/200 128/128 [==============================] - 6s 44ms/step - loss: 1.6445 - accuracy: 0.4040 - val_loss: 1.6377 - val_accuracy: 0.4416 Epoch 10/200 128/128 [==============================] - 6s 43ms/step - loss: 1.6354 - accuracy: 0.4139 - val_loss: 1.7611 - val_accuracy: 0.3902 Epoch 11/200 128/128 [==============================] - 6s 43ms/step - loss: 1.6256 - accuracy: 0.4142 - val_loss: 1.5674 - val_accuracy: 0.4463 Epoch 12/200 128/128 [==============================] - 6s 43ms/step - loss: 1.6069 - accuracy: 0.4218 - val_loss: 1.9095 - val_accuracy: 0.3528 Epoch 13/200 128/128 [==============================] - 6s 48ms/step - loss: 1.5727 - accuracy: 0.4338 - val_loss: 2.1961 - val_accuracy: 0.2944 Epoch 14/200 128/128 [==============================] - 6s 43ms/step - loss: 1.5582 - accuracy: 0.4383 - val_loss: 1.5683 - 
val_accuracy: 0.4393 Epoch 15/200 128/128 [==============================] - 6s 43ms/step - loss: 1.5264 - accuracy: 0.4532 - val_loss: 1.5091 - val_accuracy: 0.4626 Epoch 16/200 128/128 [==============================] - 6s 43ms/step - loss: 1.5253 - accuracy: 0.4566 - val_loss: 1.5643 - val_accuracy: 0.4486 Epoch 17/200 128/128 [==============================] - 6s 45ms/step - loss: 1.5126 - accuracy: 0.4697 - val_loss: 1.7227 - val_accuracy: 0.4112 Epoch 18/200 128/128 [==============================] - 5s 43ms/step - loss: 1.5238 - accuracy: 0.4697 - val_loss: 1.5137 - val_accuracy: 0.4603 Epoch 19/200 128/128 [==============================] - 6s 43ms/step - loss: 1.5148 - accuracy: 0.4650 - val_loss: 1.7024 - val_accuracy: 0.3598 Epoch 20/200 128/128 [==============================] - 6s 44ms/step - loss: 1.5033 - accuracy: 0.4676 - val_loss: 1.9007 - val_accuracy: 0.3294 Epoch 21/200 128/128 [==============================] - 5s 43ms/step - loss: 1.4784 - accuracy: 0.4737 - val_loss: 1.5038 - val_accuracy: 0.4696 Epoch 22/200 128/128 [==============================] - 6s 44ms/step - loss: 1.4577 - accuracy: 0.4881 - val_loss: 1.5272 - val_accuracy: 0.4416 Epoch 23/200 128/128 [==============================] - 6s 44ms/step - loss: 1.4410 - accuracy: 0.4910 - val_loss: 1.5277 - val_accuracy: 0.4813 Epoch 24/200 128/128 [==============================] - 6s 44ms/step - loss: 1.4568 - accuracy: 0.4813 - val_loss: 1.5773 - val_accuracy: 0.4556 Epoch 25/200 128/128 [==============================] - 6s 45ms/step - loss: 1.4391 - accuracy: 0.4928 - val_loss: 1.4457 - val_accuracy: 0.5210 Epoch 26/200 128/128 [==============================] - 6s 44ms/step - loss: 1.4379 - accuracy: 0.4952 - val_loss: 1.3431 - val_accuracy: 0.5514 Epoch 27/200 128/128 [==============================] - 6s 44ms/step - loss: 1.4281 - accuracy: 0.4975 - val_loss: 2.2969 - val_accuracy: 0.3248 Epoch 28/200 128/128 [==============================] - 6s 46ms/step - loss: 1.4068 - 
accuracy: 0.5014 - val_loss: 1.3634 - val_accuracy: 0.5257 Epoch 29/200 128/128 [==============================] - 6s 43ms/step - loss: 1.4069 - accuracy: 0.5064 - val_loss: 1.9899 - val_accuracy: 0.3668 Epoch 30/200 128/128 [==============================] - 6s 45ms/step - loss: 1.3893 - accuracy: 0.5088 - val_loss: 1.3957 - val_accuracy: 0.5117 Epoch 31/200 128/128 [==============================] - 6s 43ms/step - loss: 1.3830 - accuracy: 0.5117 - val_loss: 1.2976 - val_accuracy: 0.5514 Epoch 32/200 128/128 [==============================] - 6s 46ms/step - loss: 1.3750 - accuracy: 0.5187 - val_loss: 1.4194 - val_accuracy: 0.5210 Epoch 33/200 128/128 [==============================] - 6s 43ms/step - loss: 1.3633 - accuracy: 0.5216 - val_loss: 1.4184 - val_accuracy: 0.4953 Epoch 34/200 128/128 [==============================] - 6s 45ms/step - loss: 1.3709 - accuracy: 0.5140 - val_loss: 1.7312 - val_accuracy: 0.4369 Epoch 35/200 128/128 [==============================] - 6s 44ms/step - loss: 1.3577 - accuracy: 0.5185 - val_loss: 1.4089 - val_accuracy: 0.5023 Epoch 36/200 128/128 [==============================] - 6s 45ms/step - loss: 1.3321 - accuracy: 0.5360 - val_loss: 1.4083 - val_accuracy: 0.5023 Epoch 37/200 128/128 [==============================] - 6s 44ms/step - loss: 1.3468 - accuracy: 0.5269 - val_loss: 1.4917 - val_accuracy: 0.4766 Epoch 38/200 128/128 [==============================] - 7s 54ms/step - loss: 1.3270 - accuracy: 0.5413 - val_loss: 1.3918 - val_accuracy: 0.5070 Epoch 39/200 128/128 [==============================] - 6s 44ms/step - loss: 1.3362 - accuracy: 0.5331 - val_loss: 1.4782 - val_accuracy: 0.4673 Epoch 40/200 128/128 [==============================] - 6s 44ms/step - loss: 1.3084 - accuracy: 0.5436 - val_loss: 1.3914 - val_accuracy: 0.5210 Epoch 41/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2927 - accuracy: 0.5468 - val_loss: 1.3751 - val_accuracy: 0.5234 Epoch 42/200 128/128 [==============================] - 
6s 45ms/step - loss: 1.3022 - accuracy: 0.5368 - val_loss: 1.4383 - val_accuracy: 0.4766 Epoch 43/200 128/128 [==============================] - 6s 44ms/step - loss: 1.3077 - accuracy: 0.5358 - val_loss: 1.2781 - val_accuracy: 0.5561 Epoch 44/200 128/128 [==============================] - 6s 43ms/step - loss: 1.3005 - accuracy: 0.5447 - val_loss: 1.3189 - val_accuracy: 0.5280 Epoch 45/200 128/128 [==============================] - 6s 43ms/step - loss: 1.2761 - accuracy: 0.5520 - val_loss: 1.2953 - val_accuracy: 0.5467 Epoch 46/200 128/128 [==============================] - 6s 43ms/step - loss: 1.2893 - accuracy: 0.5572 - val_loss: 1.3069 - val_accuracy: 0.5537 Epoch 47/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2744 - accuracy: 0.5499 - val_loss: 1.3804 - val_accuracy: 0.4860 Epoch 48/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2589 - accuracy: 0.5538 - val_loss: 1.5662 - val_accuracy: 0.4579 Epoch 49/200 128/128 [==============================] - 6s 43ms/step - loss: 1.2826 - accuracy: 0.5614 - val_loss: 1.5342 - val_accuracy: 0.4790 Epoch 50/200 128/128 [==============================] - 6s 43ms/step - loss: 1.2610 - accuracy: 0.5586 - val_loss: 1.3293 - val_accuracy: 0.5187 Epoch 51/200 128/128 [==============================] - 6s 45ms/step - loss: 1.2759 - accuracy: 0.5523 - val_loss: 1.3642 - val_accuracy: 0.5093 Epoch 52/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2666 - accuracy: 0.5523 - val_loss: 1.2694 - val_accuracy: 0.5561 Epoch 53/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2571 - accuracy: 0.5672 - val_loss: 1.5299 - val_accuracy: 0.4977 Epoch 54/200 128/128 [==============================] - 6s 43ms/step - loss: 1.2428 - accuracy: 0.5680 - val_loss: 1.2175 - val_accuracy: 0.5607 Epoch 55/200 128/128 [==============================] - 6s 45ms/step - loss: 1.2528 - accuracy: 0.5546 - val_loss: 1.1794 - val_accuracy: 0.5864 Epoch 56/200 128/128 
[==============================] - 6s 44ms/step - loss: 1.2463 - accuracy: 0.5648 - val_loss: 1.4641 - val_accuracy: 0.4813 Epoch 57/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2606 - accuracy: 0.5669 - val_loss: 1.3700 - val_accuracy: 0.5444 Epoch 58/200 128/128 [==============================] - 6s 45ms/step - loss: 1.2252 - accuracy: 0.5711 - val_loss: 1.2054 - val_accuracy: 0.5771 Epoch 59/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2211 - accuracy: 0.5753 - val_loss: 1.3824 - val_accuracy: 0.5210 Epoch 60/200 128/128 [==============================] - 6s 47ms/step - loss: 1.2442 - accuracy: 0.5769 - val_loss: 1.8047 - val_accuracy: 0.4276 Epoch 61/200 128/128 [==============================] - 6s 46ms/step - loss: 1.2462 - accuracy: 0.5701 - val_loss: 1.3206 - val_accuracy: 0.5584 Epoch 62/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2416 - accuracy: 0.5643 - val_loss: 1.2202 - val_accuracy: 0.5864 Epoch 63/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2232 - accuracy: 0.5790 - val_loss: 1.7144 - val_accuracy: 0.4579 Epoch 64/200 128/128 [==============================] - 6s 45ms/step - loss: 1.2171 - accuracy: 0.5751 - val_loss: 1.2867 - val_accuracy: 0.5584 Epoch 65/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2163 - accuracy: 0.5795 - val_loss: 1.1896 - val_accuracy: 0.5981 Epoch 66/200 128/128 [==============================] - 6s 45ms/step - loss: 1.2062 - accuracy: 0.5790 - val_loss: 1.2001 - val_accuracy: 0.5631 Epoch 67/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2262 - accuracy: 0.5779 - val_loss: 1.3772 - val_accuracy: 0.5187 Epoch 68/200 128/128 [==============================] - 6s 46ms/step - loss: 1.2188 - accuracy: 0.5808 - val_loss: 1.2108 - val_accuracy: 0.5958 Epoch 69/200 128/128 [==============================] - 7s 55ms/step - loss: 1.1897 - accuracy: 0.5813 - val_loss: 1.1840 - 
val_accuracy: 0.5958 Epoch 70/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1874 - accuracy: 0.5937 - val_loss: 1.4988 - val_accuracy: 0.4836 Epoch 71/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1899 - accuracy: 0.5876 - val_loss: 1.1601 - val_accuracy: 0.6098 Epoch 72/200 128/128 [==============================] - 6s 45ms/step - loss: 1.2144 - accuracy: 0.5808 - val_loss: 1.2208 - val_accuracy: 0.5678 Epoch 73/200 128/128 [==============================] - 6s 44ms/step - loss: 1.2016 - accuracy: 0.5795 - val_loss: 1.2039 - val_accuracy: 0.5841 Epoch 74/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1791 - accuracy: 0.5965 - val_loss: 1.7227 - val_accuracy: 0.4229 Epoch 75/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1814 - accuracy: 0.5910 - val_loss: 1.2938 - val_accuracy: 0.5561 Epoch 76/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1705 - accuracy: 0.5992 - val_loss: 1.2303 - val_accuracy: 0.5724 Epoch 77/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1988 - accuracy: 0.5808 - val_loss: 1.1627 - val_accuracy: 0.6168 Epoch 78/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1776 - accuracy: 0.5903 - val_loss: 1.2383 - val_accuracy: 0.5678 Epoch 79/200 128/128 [==============================] - 6s 43ms/step - loss: 1.1895 - accuracy: 0.5874 - val_loss: 1.1462 - val_accuracy: 0.5864 Epoch 80/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1776 - accuracy: 0.5939 - val_loss: 1.2173 - val_accuracy: 0.5888 Epoch 81/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1741 - accuracy: 0.6049 - val_loss: 1.2303 - val_accuracy: 0.5748 Epoch 82/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1853 - accuracy: 0.5866 - val_loss: 1.2542 - val_accuracy: 0.5514 Epoch 83/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1685 - 
accuracy: 0.5973 - val_loss: 1.3804 - val_accuracy: 0.5491 Epoch 84/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1573 - accuracy: 0.5973 - val_loss: 1.2215 - val_accuracy: 0.5841 Epoch 85/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1788 - accuracy: 0.5926 - val_loss: 1.2400 - val_accuracy: 0.5701 Epoch 86/200 128/128 [==============================] - 6s 46ms/step - loss: 1.1695 - accuracy: 0.6010 - val_loss: 1.2790 - val_accuracy: 0.5794 Epoch 87/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1658 - accuracy: 0.5955 - val_loss: 1.0967 - val_accuracy: 0.6145 Epoch 88/200 128/128 [==============================] - 6s 43ms/step - loss: 1.1587 - accuracy: 0.5952 - val_loss: 1.1647 - val_accuracy: 0.6028 Epoch 89/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1639 - accuracy: 0.5952 - val_loss: 1.1898 - val_accuracy: 0.6005 Epoch 90/200 128/128 [==============================] - 6s 48ms/step - loss: 1.1914 - accuracy: 0.6010 - val_loss: 1.1508 - val_accuracy: 0.5958 Epoch 91/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1645 - accuracy: 0.5937 - val_loss: 1.6743 - val_accuracy: 0.4556 Epoch 92/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1433 - accuracy: 0.6078 - val_loss: 1.2063 - val_accuracy: 0.5911 Epoch 93/200 128/128 [==============================] - 6s 43ms/step - loss: 1.1270 - accuracy: 0.6096 - val_loss: 1.3283 - val_accuracy: 0.5397 Epoch 94/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1272 - accuracy: 0.6083 - val_loss: 1.3969 - val_accuracy: 0.5561 Epoch 95/200 128/128 [==============================] - 6s 46ms/step - loss: 1.1614 - accuracy: 0.5974 - val_loss: 1.1440 - val_accuracy: 0.6145 Epoch 96/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1465 - accuracy: 0.6020 - val_loss: 1.4556 - val_accuracy: 0.5187 Epoch 97/200 128/128 [==============================] - 
6s 44ms/step - loss: 1.1359 - accuracy: 0.6036 - val_loss: 1.1599 - val_accuracy: 0.5818 Epoch 98/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1438 - accuracy: 0.5986 - val_loss: 1.2106 - val_accuracy: 0.5607 Epoch 99/200 128/128 [==============================] - 6s 43ms/step - loss: 1.1534 - accuracy: 0.6015 - val_loss: 1.2408 - val_accuracy: 0.5771 Epoch 100/200 128/128 [==============================] - 6s 47ms/step - loss: 1.1384 - accuracy: 0.6091 - val_loss: 1.3613 - val_accuracy: 0.5678 Epoch 101/200 128/128 [==============================] - 6s 49ms/step - loss: 1.1182 - accuracy: 0.6070 - val_loss: 1.6120 - val_accuracy: 0.4790 Epoch 102/200 128/128 [==============================] - 6s 48ms/step - loss: 1.1714 - accuracy: 0.5882 - val_loss: 1.2130 - val_accuracy: 0.5841 Epoch 103/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1250 - accuracy: 0.6031 - val_loss: 1.5183 - val_accuracy: 0.5327 Epoch 104/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1238 - accuracy: 0.6083 - val_loss: 1.2137 - val_accuracy: 0.5888 Epoch 105/200 128/128 [==============================] - 6s 46ms/step - loss: 1.1159 - accuracy: 0.6167 - val_loss: 1.1614 - val_accuracy: 0.5864 Epoch 106/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1073 - accuracy: 0.6102 - val_loss: 1.1547 - val_accuracy: 0.5935 Epoch 107/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1385 - accuracy: 0.6089 - val_loss: 1.1268 - val_accuracy: 0.6098 Epoch 108/200 128/128 [==============================] - 6s 43ms/step - loss: 1.1352 - accuracy: 0.6039 - val_loss: 1.1905 - val_accuracy: 0.6075 Epoch 109/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1491 - accuracy: 0.6023 - val_loss: 1.1540 - val_accuracy: 0.6051 Epoch 110/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1262 - accuracy: 0.6020 - val_loss: 1.1877 - val_accuracy: 0.6028 Epoch 111/200 
128/128 [==============================] - 6s 44ms/step - loss: 1.1356 - accuracy: 0.6013 - val_loss: 1.0888 - val_accuracy: 0.6121 Epoch 112/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1292 - accuracy: 0.6104 - val_loss: 1.1215 - val_accuracy: 0.6121 Epoch 113/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1065 - accuracy: 0.6157 - val_loss: 1.1904 - val_accuracy: 0.5958 Epoch 114/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1245 - accuracy: 0.6047 - val_loss: 1.2435 - val_accuracy: 0.5701 Epoch 115/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1475 - accuracy: 0.6083 - val_loss: 1.2682 - val_accuracy: 0.5561 Epoch 116/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1175 - accuracy: 0.6128 - val_loss: 1.1009 - val_accuracy: 0.6192 Epoch 117/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1111 - accuracy: 0.6083 - val_loss: 1.0995 - val_accuracy: 0.6168 Epoch 118/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1131 - accuracy: 0.6154 - val_loss: 1.7861 - val_accuracy: 0.4322 Epoch 119/200 128/128 [==============================] - 6s 43ms/step - loss: 1.1307 - accuracy: 0.6062 - val_loss: 1.1673 - val_accuracy: 0.6192 Epoch 120/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1191 - accuracy: 0.6107 - val_loss: 1.1400 - val_accuracy: 0.6121 Epoch 121/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1130 - accuracy: 0.6146 - val_loss: 1.4964 - val_accuracy: 0.5304 Epoch 122/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1051 - accuracy: 0.6146 - val_loss: 1.0953 - val_accuracy: 0.6168 Epoch 123/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1026 - accuracy: 0.6175 - val_loss: 1.5946 - val_accuracy: 0.4883 Epoch 124/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1064 - accuracy: 0.6047 - val_loss: 
1.1647 - val_accuracy: 0.5958 Epoch 125/200 128/128 [==============================] - 6s 46ms/step - loss: 1.1086 - accuracy: 0.6199 - val_loss: 1.0967 - val_accuracy: 0.6238 Epoch 126/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0806 - accuracy: 0.6259 - val_loss: 1.2440 - val_accuracy: 0.5864 Epoch 127/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1044 - accuracy: 0.6188 - val_loss: 1.1614 - val_accuracy: 0.5818 Epoch 128/200 128/128 [==============================] - 6s 47ms/step - loss: 1.1158 - accuracy: 0.6081 - val_loss: 1.8969 - val_accuracy: 0.3925 Epoch 129/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0897 - accuracy: 0.6209 - val_loss: 1.2186 - val_accuracy: 0.5724 Epoch 130/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0873 - accuracy: 0.6248 - val_loss: 1.0497 - val_accuracy: 0.6519 Epoch 131/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0994 - accuracy: 0.6159 - val_loss: 1.1550 - val_accuracy: 0.6121 Epoch 132/200 128/128 [==============================] - 7s 57ms/step - loss: 1.0992 - accuracy: 0.6259 - val_loss: 1.1689 - val_accuracy: 0.6028 Epoch 133/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1059 - accuracy: 0.6212 - val_loss: 1.2221 - val_accuracy: 0.5841 Epoch 134/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1005 - accuracy: 0.6107 - val_loss: 1.1797 - val_accuracy: 0.5981 Epoch 135/200 128/128 [==============================] - 6s 44ms/step - loss: 1.1099 - accuracy: 0.6162 - val_loss: 1.0848 - val_accuracy: 0.6168 Epoch 136/200 128/128 [==============================] - 6s 47ms/step - loss: 1.0951 - accuracy: 0.6183 - val_loss: 1.2634 - val_accuracy: 0.5561 Epoch 137/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0780 - accuracy: 0.6285 - val_loss: 1.3484 - val_accuracy: 0.5701 Epoch 138/200 128/128 [==============================] - 6s 45ms/step - 
loss: 1.0966 - accuracy: 0.6196 - val_loss: 1.0993 - val_accuracy: 0.6145 Epoch 139/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1026 - accuracy: 0.6175 - val_loss: 1.1101 - val_accuracy: 0.6192 Epoch 140/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0944 - accuracy: 0.6230 - val_loss: 1.5045 - val_accuracy: 0.5280 Epoch 141/200 128/128 [==============================] - 6s 43ms/step - loss: 1.0699 - accuracy: 0.6251 - val_loss: 1.0447 - val_accuracy: 0.6332 Epoch 142/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0665 - accuracy: 0.6246 - val_loss: 1.0290 - val_accuracy: 0.6519 Epoch 143/200 128/128 [==============================] - 6s 46ms/step - loss: 1.0555 - accuracy: 0.6356 - val_loss: 1.1737 - val_accuracy: 0.5864 Epoch 144/200 128/128 [==============================] - 6s 45ms/step - loss: 1.1110 - accuracy: 0.6073 - val_loss: 1.1600 - val_accuracy: 0.5911 Epoch 145/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0796 - accuracy: 0.6254 - val_loss: 1.0787 - val_accuracy: 0.6425 Epoch 146/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0710 - accuracy: 0.6204 - val_loss: 1.0862 - val_accuracy: 0.6472 Epoch 147/200 128/128 [==============================] - 6s 46ms/step - loss: 1.0682 - accuracy: 0.6369 - val_loss: 1.0001 - val_accuracy: 0.6589 Epoch 148/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0573 - accuracy: 0.6314 - val_loss: 1.0529 - val_accuracy: 0.6449 Epoch 149/200 128/128 [==============================] - 6s 46ms/step - loss: 1.0555 - accuracy: 0.6392 - val_loss: 1.2005 - val_accuracy: 0.6051 Epoch 150/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0825 - accuracy: 0.6282 - val_loss: 1.1202 - val_accuracy: 0.6051 Epoch 151/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0741 - accuracy: 0.6269 - val_loss: 1.5573 - val_accuracy: 0.5280 Epoch 152/200 128/128 
[==============================] - 6s 46ms/step - loss: 1.0574 - accuracy: 0.6400 - val_loss: 1.1009 - val_accuracy: 0.6238 Epoch 153/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0702 - accuracy: 0.6264 - val_loss: 1.3309 - val_accuracy: 0.5234 Epoch 154/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0797 - accuracy: 0.6227 - val_loss: 1.1801 - val_accuracy: 0.5958 Epoch 155/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0769 - accuracy: 0.6267 - val_loss: 1.2675 - val_accuracy: 0.5771 Epoch 156/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0581 - accuracy: 0.6314 - val_loss: 1.0109 - val_accuracy: 0.6519 Epoch 157/200 128/128 [==============================] - 6s 48ms/step - loss: 1.0633 - accuracy: 0.6306 - val_loss: 1.1450 - val_accuracy: 0.6051 Epoch 158/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0663 - accuracy: 0.6290 - val_loss: 1.0566 - val_accuracy: 0.6285 Epoch 159/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0525 - accuracy: 0.6364 - val_loss: 1.0305 - val_accuracy: 0.6355 Epoch 160/200 128/128 [==============================] - 7s 55ms/step - loss: 1.0566 - accuracy: 0.6316 - val_loss: 1.2587 - val_accuracy: 0.5794 Epoch 161/200 128/128 [==============================] - 6s 47ms/step - loss: 1.0604 - accuracy: 0.6316 - val_loss: 1.2169 - val_accuracy: 0.5911 Epoch 162/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0521 - accuracy: 0.6290 - val_loss: 1.1192 - val_accuracy: 0.6262 Epoch 163/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0508 - accuracy: 0.6351 - val_loss: 1.1037 - val_accuracy: 0.5911 Epoch 164/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0908 - accuracy: 0.6146 - val_loss: 1.1618 - val_accuracy: 0.5911 Epoch 165/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0547 - accuracy: 0.6403 - val_loss: 1.1286 
- val_accuracy: 0.6121 Epoch 166/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0462 - accuracy: 0.6382 - val_loss: 1.0785 - val_accuracy: 0.6308 Epoch 167/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0292 - accuracy: 0.6421 - val_loss: 1.0757 - val_accuracy: 0.6332 Epoch 168/200 128/128 [==============================] - 6s 46ms/step - loss: 1.0374 - accuracy: 0.6406 - val_loss: 1.0911 - val_accuracy: 0.6262 Epoch 169/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0804 - accuracy: 0.6251 - val_loss: 1.3624 - val_accuracy: 0.5631 Epoch 170/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0337 - accuracy: 0.6516 - val_loss: 1.1511 - val_accuracy: 0.5794 Epoch 171/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0658 - accuracy: 0.6285 - val_loss: 1.2314 - val_accuracy: 0.5748 Epoch 172/200 128/128 [==============================] - 6s 47ms/step - loss: 1.0421 - accuracy: 0.6351 - val_loss: 1.1225 - val_accuracy: 0.5958 Epoch 173/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0459 - accuracy: 0.6413 - val_loss: 1.3388 - val_accuracy: 0.5514 Epoch 174/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0311 - accuracy: 0.6461 - val_loss: 1.0377 - val_accuracy: 0.6636 Epoch 175/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0390 - accuracy: 0.6335 - val_loss: 1.2818 - val_accuracy: 0.5421 Epoch 176/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0710 - accuracy: 0.6316 - val_loss: 1.4536 - val_accuracy: 0.5280 Epoch 177/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0460 - accuracy: 0.6356 - val_loss: 1.0014 - val_accuracy: 0.6495 Epoch 178/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0273 - accuracy: 0.6461 - val_loss: 1.0170 - val_accuracy: 0.6659 Epoch 179/200 128/128 [==============================] - 6s 45ms/step - loss: 
1.0308 - accuracy: 0.6437 - val_loss: 1.1233 - val_accuracy: 0.5981 Epoch 180/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0389 - accuracy: 0.6398 - val_loss: 1.2872 - val_accuracy: 0.5444 Epoch 181/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0069 - accuracy: 0.6510 - val_loss: 1.0674 - val_accuracy: 0.6215 Epoch 182/200 128/128 [==============================] - 6s 46ms/step - loss: 1.0240 - accuracy: 0.6487 - val_loss: 1.0723 - val_accuracy: 0.6308 Epoch 183/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0623 - accuracy: 0.6254 - val_loss: 1.1839 - val_accuracy: 0.5935 Epoch 184/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0102 - accuracy: 0.6539 - val_loss: 1.1701 - val_accuracy: 0.6075 Epoch 185/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0312 - accuracy: 0.6382 - val_loss: 1.2401 - val_accuracy: 0.5748 Epoch 186/200 128/128 [==============================] - 6s 47ms/step - loss: 1.0352 - accuracy: 0.6479 - val_loss: 1.0852 - val_accuracy: 0.6192 Epoch 187/200 128/128 [==============================] - 6s 49ms/step - loss: 1.0295 - accuracy: 0.6437 - val_loss: 1.1905 - val_accuracy: 0.5888 Epoch 188/200 128/128 [==============================] - 6s 46ms/step - loss: 1.0270 - accuracy: 0.6479 - val_loss: 1.1650 - val_accuracy: 0.6028 Epoch 189/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0203 - accuracy: 0.6450 - val_loss: 1.0069 - val_accuracy: 0.6472 Epoch 190/200 128/128 [==============================] - 6s 46ms/step - loss: 1.0311 - accuracy: 0.6419 - val_loss: 1.2137 - val_accuracy: 0.5818 Epoch 191/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0326 - accuracy: 0.6447 - val_loss: 1.2521 - val_accuracy: 0.5654 Epoch 192/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0392 - accuracy: 0.6419 - val_loss: 0.9843 - val_accuracy: 0.6379 Epoch 193/200 128/128 
[==============================] - 6s 45ms/step - loss: 1.0247 - accuracy: 0.6400 - val_loss: 1.0387 - val_accuracy: 0.6262 Epoch 194/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0320 - accuracy: 0.6377 - val_loss: 1.1779 - val_accuracy: 0.6051 Epoch 195/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0279 - accuracy: 0.6427 - val_loss: 1.2350 - val_accuracy: 0.5888 Epoch 196/200 128/128 [==============================] - 6s 46ms/step - loss: 1.0291 - accuracy: 0.6447 - val_loss: 1.0296 - val_accuracy: 0.6565 Epoch 197/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0225 - accuracy: 0.6508 - val_loss: 1.0157 - val_accuracy: 0.6519 Epoch 198/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0251 - accuracy: 0.6461 - val_loss: 1.0513 - val_accuracy: 0.6425 Epoch 199/200 128/128 [==============================] - 6s 44ms/step - loss: 1.0407 - accuracy: 0.6369 - val_loss: 1.0229 - val_accuracy: 0.6449 Epoch 200/200 128/128 [==============================] - 6s 45ms/step - loss: 1.0093 - accuracy: 0.6576 - val_loss: 1.2933 - val_accuracy: 0.5748
# Plot the training and validation accuracy curves recorded while fitting
# the VGG16 transfer-learning model, one point per epoch.
acc_history = history_vgg16.history
plt.plot(acc_history['accuracy'])
plt.plot(acc_history['val_accuracy'])
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.title('Model Accuracy')
plt.legend(['Train', 'Validation'], loc='upper left')
plt.show()
Observations:
# Evaluate the transfer-learning model on the normalized training set and
# collect accuracy/recall/precision/F1 into a one-row DataFrame.
new_model.evaluate(X_train_normalized, y_train_encoded, verbose=1)
model3_TrainDF = model_performance_classification_sklearn_with_threshold(
    new_model,
    X_train_normalized,
    y_train_encoded,
)
# Display the training-set metrics.
model3_TrainDF
121/121 [==============================] - 2s 19ms/step - loss: 1.1144 - accuracy: 0.6241 121/121 [==============================] - 2s 16ms/step
| Accuracy | Recall | Precision | F1 | |
|---|---|---|---|---|
| 0 | 0.624123 | 0.624123 | 0.646204 | 0.608608 |
Observations:
# Evaluate the transfer-learning model on the normalized validation set and
# collect accuracy/recall/precision/F1 into a one-row DataFrame.
new_model.evaluate(X_val_normalized, y_val_encoded, verbose=1)
model3_ValDF = model_performance_classification_sklearn_with_threshold(
    new_model,
    X_val_normalized,
    y_val_encoded,
)
# Display the validation-set metrics.
model3_ValDF
14/14 [==============================] - 0s 18ms/step - loss: 1.2933 - accuracy: 0.5748 14/14 [==============================] - 0s 16ms/step
| Accuracy | Recall | Precision | F1 | |
|---|---|---|---|---|
| 0 | 0.574766 | 0.574766 | 0.580763 | 0.547436 |
Observations:
# Evaluate the transfer-learning model on the normalized test set and
# collect accuracy/recall/precision/F1 into a one-row DataFrame.
new_model.evaluate(X_test_normalized, y_test_encoded, verbose=2)
model3_TestDF = model_performance_classification_sklearn_with_threshold(
    new_model,
    X_test_normalized,
    y_test_encoded,
)
# Display the test-set metrics.
model3_TestDF
15/15 - 1s - loss: 1.3466 - accuracy: 0.5579 - 686ms/epoch - 46ms/step 15/15 [==============================] - 0s 16ms/step
| Accuracy | Recall | Precision | F1 | |
|---|---|---|---|---|
| 0 | 0.557895 | 0.557895 | 0.580586 | 0.543705 |
Observations:
# Visualizing the predicted and correct label of images from test data.
# Fix: the random index was previously drawn from the hard-coded range
# [0, 400), which silently excludes any test samples beyond index 399;
# sample from the actual test-set size instead. The redundant nested
# rows/cols loops (rows was 1) are collapsed into a single loop.
rows = 1
cols = 5
for _ in range(rows * cols):
    # Pick a uniformly random test image.
    random_index = np.random.randint(0, len(X_test))
    plt.imshow(X_test[random_index])
    plt.show()
    # The model expects a batch dimension, so reshape the single
    # 64x64 RGB image to (1, 64, 64, 3) before predicting.
    pred = new_model.predict(X_test_normalized[random_index].reshape(1, 64, 64, 3))
    print('Predicted Label', enc.inverse_transform(pred))
    print('True Label', enc.inverse_transform(y_test_encoded)[random_index])
1/1 [==============================] - 0s 250ms/step Predicted Label ['Common wheat'] True Label Common wheat
1/1 [==============================] - 0s 22ms/step Predicted Label ['Maize'] True Label Maize
1/1 [==============================] - 0s 20ms/step Predicted Label ['Scentless Mayweed'] True Label Scentless Mayweed
1/1 [==============================] - 0s 20ms/step Predicted Label ['Charlock'] True Label Small-flowered Cranesbill
1/1 [==============================] - 0s 21ms/step Predicted Label ['Scentless Mayweed'] True Label Shepherds Purse
Inference:
# training performance comparison: assemble each model's training metrics
# (transposed to columns) side by side in a single DataFrame.
train_model_names = [
    "Model 0 - Simple CNN",
    "Model 1 - CNN with Hypertuning",
    "Model 2 - CNN with Data Augmentation",
    "Model 3 - CNN with Transfer Learning",
]
train_metric_frames = [
    model0_TrainDF.T,
    model1_TrainDF.T,
    model2_TrainDF.T,
    model3_TrainDF.T,
]
models_train_comp_df = pd.concat(train_metric_frames, axis=1)
models_train_comp_df.columns = train_model_names
print("Training performance comparison:")
print(models_train_comp_df.T)
# Testing performance comparison: assemble each model's test metrics
# (transposed to columns) side by side in a single DataFrame.
test_model_names = [
    "Model 0 - Simple CNN",
    "Model 1 - CNN with Hypertuning",
    "Model 2 - CNN with Data Augmentation",
    "Model 3 - CNN with Transfer Learning",
]
test_metric_frames = [
    model0_TestDF.T,
    model1_TestDF.T,
    model2_TestDF.T,
    model3_TestDF.T,
]
models_test_comp_df = pd.concat(test_metric_frames, axis=1)
models_test_comp_df.columns = test_model_names
print("\n\n")
print("Testing performance comparison:")
print(models_test_comp_df.T)
Training performance comparison:
Accuracy Recall Precision F1
Model 0 - Simple CNN 1.000000 1.000000 1.000000 1.000000
Model 1 - CNN with Hypertuning 0.973746 0.973746 0.974874 0.973941
Model 2 - CNN with Data Augmentation 0.946452 0.946452 0.947343 0.946507
Model 3 - CNN with Transfer Learning 0.624123 0.624123 0.646204 0.608608
Testing performance comparison:
Accuracy Recall Precision F1
Model 0 - Simple CNN 0.732632 0.732632 0.729495 0.727110
Model 1 - CNN with Hypertuning 0.886316 0.886316 0.894756 0.887596
Model 2 - CNN with Data Augmentation 0.911579 0.911579 0.914286 0.912290
Model 3 - CNN with Transfer Learning 0.557895 0.557895 0.580586 0.543705
Inferences: